diff --git a/autogpt_platform/autogpt_libs/autogpt_libs/supabase_integration_credentials_store/types.py b/autogpt_platform/autogpt_libs/autogpt_libs/supabase_integration_credentials_store/types.py new file mode 100644 index 0000000000..04c6fa2a77 --- /dev/null +++ b/autogpt_platform/autogpt_libs/autogpt_libs/supabase_integration_credentials_store/types.py @@ -0,0 +1,76 @@ +from typing import Annotated, Any, Literal, Optional, TypedDict +from uuid import uuid4 + +from pydantic import BaseModel, Field, SecretStr, field_serializer + + +class _BaseCredentials(BaseModel): + id: str = Field(default_factory=lambda: str(uuid4())) + provider: str + title: Optional[str] + + @field_serializer("*") + def dump_secret_strings(value: Any, _info): + if isinstance(value, SecretStr): + return value.get_secret_value() + return value + + +class OAuth2Credentials(_BaseCredentials): + type: Literal["oauth2"] = "oauth2" + username: Optional[str] + """Username of the third-party service user that these credentials belong to""" + access_token: SecretStr + access_token_expires_at: Optional[int] + """Unix timestamp (seconds) indicating when the access token expires (if at all)""" + refresh_token: Optional[SecretStr] + refresh_token_expires_at: Optional[int] + """Unix timestamp (seconds) indicating when the refresh token expires (if at all)""" + scopes: list[str] + metadata: dict[str, Any] = Field(default_factory=dict) + + def bearer(self) -> str: + return f"Bearer {self.access_token.get_secret_value()}" + + +class APIKeyCredentials(_BaseCredentials): + type: Literal["api_key"] = "api_key" + api_key: SecretStr + expires_at: Optional[int] + """Unix timestamp (seconds) indicating when the API key expires (if at all)""" + + def bearer(self) -> str: + return f"Bearer {self.api_key.get_secret_value()}" + + +Credentials = Annotated[ + OAuth2Credentials | APIKeyCredentials, + Field(discriminator="type"), +] + + +CredentialsType = Literal["api_key", "oauth2"] + + +class OAuthState(BaseModel): + token: str + provider: str + expires_at: int + """Unix timestamp (seconds) indicating when this OAuth state expires""" + code_verifier: Optional[str] = None + scopes: list[str] + + +class UserMetadata(BaseModel): + integration_credentials: list[Credentials] = Field(default_factory=list) + integration_oauth_states: list[OAuthState] = Field(default_factory=list) + + +class UserMetadataRaw(TypedDict, total=False): + integration_credentials: list[dict] + integration_oauth_states: list[dict] + + +class UserIntegrations(BaseModel): + credentials: list[Credentials] = Field(default_factory=list) + oauth_states: list[OAuthState] = Field(default_factory=list) diff --git a/autogpt_platform/backend/.env.example b/autogpt_platform/backend/.env.example index 0dd10e8385..555de676c8 100644 --- a/autogpt_platform/backend/.env.example +++ b/autogpt_platform/backend/.env.example @@ -58,6 +58,21 @@ GITHUB_CLIENT_SECRET= GOOGLE_CLIENT_ID= GOOGLE_CLIENT_SECRET= +# Twitter (X) OAuth 2.0 with PKCE Configuration +# 1. Create a Twitter Developer Account: +# - Visit https://developer.x.com/en and sign up +# 2. Set up your application: +# - Navigate to Developer Portal > Projects > Create Project +# - Add a new app to your project +# 3. 
Configure app settings: # - App Permissions: Read + Write + Direct Messages # - App Type: Web App, Automated App or Bot # - OAuth 2.0 Callback URL: http://localhost:3000/auth/integrations/oauth_callback # - Save your Client ID and Client Secret below +TWITTER_CLIENT_ID= +TWITTER_CLIENT_SECRET= + + ## ===== OPTIONAL API KEYS ===== ## # LLM diff --git a/autogpt_platform/backend/backend/blocks/twitter/_auth.py b/autogpt_platform/backend/backend/blocks/twitter/_auth.py new file mode 100644 index 0000000000..0bff03fa37 --- /dev/null +++ b/autogpt_platform/backend/backend/blocks/twitter/_auth.py @@ -0,0 +1,60 @@ +from typing import Literal + +from pydantic import SecretStr + +from backend.data.model import ( + CredentialsField, + CredentialsMetaInput, + OAuth2Credentials, + ProviderName, ) +from backend.integrations.oauth.twitter import TwitterOAuthHandler +from backend.util.settings import Secrets + +# --8<-- [start:TwitterOAuthIsConfigured] +secrets = Secrets() +TWITTER_OAUTH_IS_CONFIGURED = bool( + secrets.twitter_client_id and secrets.twitter_client_secret +) +# --8<-- [end:TwitterOAuthIsConfigured] + +TwitterCredentials = OAuth2Credentials +TwitterCredentialsInput = CredentialsMetaInput[ + Literal[ProviderName.TWITTER], Literal["oauth2"] +] + + +# Currently, we request all permissions from the Twitter API up front. +# In the future, if we need incremental permissions, we can use these requested_scopes. +def TwitterCredentialsField(scopes: list[str]) -> TwitterCredentialsInput: + """ + Creates a Twitter credentials input on a block. + + Params: + scopes: The authorization scopes needed for the block to work. + """ + return CredentialsField( + # required_scopes=set(scopes), + required_scopes=set(TwitterOAuthHandler.DEFAULT_SCOPES + scopes), + description="The Twitter integration requires OAuth2 authentication.", + ) + + +TEST_CREDENTIALS = OAuth2Credentials( + id="01234567-89ab-cdef-0123-456789abcdef", + provider="twitter", + access_token=SecretStr("mock-twitter-access-token"), + refresh_token=SecretStr("mock-twitter-refresh-token"), + access_token_expires_at=1234567890, + scopes=["tweet.read", "tweet.write", "users.read", "offline.access"], + title="Mock Twitter OAuth2 Credentials", + username="mock-twitter-username", + refresh_token_expires_at=1234567890, +) + +TEST_CREDENTIALS_INPUT = { + "provider": TEST_CREDENTIALS.provider, + "id": TEST_CREDENTIALS.id, + "type": TEST_CREDENTIALS.type, + "title": TEST_CREDENTIALS.title, +} diff --git a/autogpt_platform/backend/backend/blocks/twitter/_builders.py b/autogpt_platform/backend/backend/blocks/twitter/_builders.py new file mode 100644 index 0000000000..6dc450c247 --- /dev/null +++ b/autogpt_platform/backend/backend/blocks/twitter/_builders.py @@ -0,0 +1,418 @@ +from datetime import datetime +from typing import Any, Dict + +from backend.blocks.twitter._mappers import ( + get_backend_expansion, + get_backend_field, + get_backend_list_expansion, + get_backend_list_field, + get_backend_media_field, + get_backend_place_field, + get_backend_poll_field, + get_backend_space_expansion, + get_backend_space_field, + get_backend_user_field, +) +from backend.blocks.twitter._types import ( # DMEventFieldFilter, + DMEventExpansionFilter, + DMEventTypeFilter, + DMMediaFieldFilter, + DMTweetFieldFilter, + ExpansionFilter, + ListExpansionsFilter, + ListFieldsFilter, + SpaceExpansionsFilter, + SpaceFieldsFilter, + TweetFieldsFilter, + TweetMediaFieldsFilter, + TweetPlaceFieldsFilter, + TweetPollFieldsFilter, + TweetReplySettingsFilter, + 
TweetUserFieldsFilter, + UserExpansionsFilter, +) + + +# Common Builder +class TweetExpansionsBuilder: + def __init__(self, param: Dict[str, Any]): + self.params: Dict[str, Any] = param + + def add_expansions(self, expansions: ExpansionFilter | None): + if expansions: + filtered_expansions = [ + name for name, value in expansions.dict().items() if value is True + ] + + if filtered_expansions: + self.params["expansions"] = ",".join( + [get_backend_expansion(exp) for exp in filtered_expansions] + ) + + return self + + def add_media_fields(self, media_fields: TweetMediaFieldsFilter | None): + if media_fields: + filtered_fields = [ + name for name, value in media_fields.dict().items() if value is True + ] + if filtered_fields: + self.params["media.fields"] = ",".join( + [get_backend_media_field(field) for field in filtered_fields] + ) + return self + + def add_place_fields(self, place_fields: TweetPlaceFieldsFilter | None): + if place_fields: + filtered_fields = [ + name for name, value in place_fields.dict().items() if value is True + ] + if filtered_fields: + self.params["place.fields"] = ",".join( + [get_backend_place_field(field) for field in filtered_fields] + ) + return self + + def add_poll_fields(self, poll_fields: TweetPollFieldsFilter | None): + if poll_fields: + filtered_fields = [ + name for name, value in poll_fields.dict().items() if value is True + ] + if filtered_fields: + self.params["poll.fields"] = ",".join( + [get_backend_poll_field(field) for field in filtered_fields] + ) + return self + + def add_tweet_fields(self, tweet_fields: TweetFieldsFilter | None): + if tweet_fields: + filtered_fields = [ + name for name, value in tweet_fields.dict().items() if value is True + ] + if filtered_fields: + self.params["tweet.fields"] = ",".join( + [get_backend_field(field) for field in filtered_fields] + ) + return self + + def add_user_fields(self, user_fields: TweetUserFieldsFilter | None): + if user_fields: + filtered_fields = [ + name for name, value in user_fields.dict().items() if value is True + ] + if filtered_fields: + self.params["user.fields"] = ",".join( + [get_backend_user_field(field) for field in filtered_fields] + ) + return self + + def build(self): + return self.params + + +class UserExpansionsBuilder: + def __init__(self, param: Dict[str, Any]): + self.params: Dict[str, Any] = param + + def add_expansions(self, expansions: UserExpansionsFilter | None): + if expansions: + filtered_expansions = [ + name for name, value in expansions.dict().items() if value is True + ] + if filtered_expansions: + self.params["expansions"] = ",".join(filtered_expansions) + return self + + def add_tweet_fields(self, tweet_fields: TweetFieldsFilter | None): + if tweet_fields: + filtered_fields = [ + name for name, value in tweet_fields.dict().items() if value is True + ] + if filtered_fields: + self.params["tweet.fields"] = ",".join( + [get_backend_field(field) for field in filtered_fields] + ) + return self + + def add_user_fields(self, user_fields: TweetUserFieldsFilter | None): + if user_fields: + filtered_fields = [ + name for name, value in user_fields.dict().items() if value is True + ] + if filtered_fields: + self.params["user.fields"] = ",".join( + [get_backend_user_field(field) for field in filtered_fields] + ) + return self + + def build(self): + return self.params + + +class ListExpansionsBuilder: + def __init__(self, param: Dict[str, Any]): + self.params: Dict[str, Any] = param + + def add_expansions(self, expansions: ListExpansionsFilter | None): + if expansions: + 
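# Keep only the expansions that are enabled (True) in the filter model, then map the frontend names to the Twitter API v2 expansion keys. + 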
filtered_expansions = [ + name for name, value in expansions.dict().items() if value is True + ] + if filtered_expansions: + self.params["expansions"] = ",".join( + [get_backend_list_expansion(exp) for exp in filtered_expansions] + ) + return self + + def add_list_fields(self, list_fields: ListFieldsFilter | None): + if list_fields: + filtered_fields = [ + name for name, value in list_fields.dict().items() if value is True + ] + if filtered_fields: + self.params["list.fields"] = ",".join( + [get_backend_list_field(field) for field in filtered_fields] + ) + return self + + def add_user_fields(self, user_fields: TweetUserFieldsFilter | None): + if user_fields: + filtered_fields = [ + name for name, value in user_fields.dict().items() if value is True + ] + if filtered_fields: + self.params["user.fields"] = ",".join( + [get_backend_user_field(field) for field in filtered_fields] + ) + return self + + def build(self): + return self.params + + +class SpaceExpansionsBuilder: + def __init__(self, param: Dict[str, Any]): + self.params: Dict[str, Any] = param + + def add_expansions(self, expansions: SpaceExpansionsFilter | None): + if expansions: + filtered_expansions = [ + name for name, value in expansions.dict().items() if value is True + ] + if filtered_expansions: + self.params["expansions"] = ",".join( + [get_backend_space_expansion(exp) for exp in filtered_expansions] + ) + return self + + def add_space_fields(self, space_fields: SpaceFieldsFilter | None): + if space_fields: + filtered_fields = [ + name for name, value in space_fields.dict().items() if value is True + ] + if filtered_fields: + self.params["space.fields"] = ",".join( + [get_backend_space_field(field) for field in filtered_fields] + ) + return self + + def add_user_fields(self, user_fields: TweetUserFieldsFilter | None): + if user_fields: + filtered_fields = [ + name for name, value in user_fields.dict().items() if value is True + ] + if filtered_fields: + self.params["user.fields"] = ",".join( + [get_backend_user_field(field) for field in filtered_fields] + ) + return self + + def build(self): + return self.params + + +class TweetDurationBuilder: + def __init__(self, param: Dict[str, Any]): + self.params: Dict[str, Any] = param + + def add_start_time(self, start_time: datetime | None): + if start_time: + self.params["start_time"] = start_time + return self + + def add_end_time(self, end_time: datetime | None): + if end_time: + self.params["end_time"] = end_time + return self + + def add_since_id(self, since_id: str | None): + if since_id: + self.params["since_id"] = since_id + return self + + def add_until_id(self, until_id: str | None): + if until_id: + self.params["until_id"] = until_id + return self + + def add_sort_order(self, sort_order: str | None): + if sort_order: + self.params["sort_order"] = sort_order + return self + + def build(self): + return self.params + + +class DMExpansionsBuilder: + def __init__(self, param: Dict[str, Any]): + self.params: Dict[str, Any] = param + + def add_expansions(self, expansions: DMEventExpansionFilter): + if expansions: + filtered_expansions = [ + name for name, value in expansions.dict().items() if value is True + ] + if filtered_expansions: + self.params["expansions"] = ",".join(filtered_expansions) + return self + + def add_event_types(self, event_types: DMEventTypeFilter): + if event_types: + filtered_types = [ + name for name, value in event_types.dict().items() if value is True + ] + if filtered_types: + self.params["event_types"] = ",".join(filtered_types) + return self + + def 
add_media_fields(self, media_fields: DMMediaFieldFilter): + if media_fields: + filtered_fields = [ + name for name, value in media_fields.dict().items() if value is True + ] + if filtered_fields: + self.params["media.fields"] = ",".join(filtered_fields) + return self + + def add_tweet_fields(self, tweet_fields: DMTweetFieldFilter): + if tweet_fields: + filtered_fields = [ + name for name, value in tweet_fields.dict().items() if value is True + ] + if filtered_fields: + self.params["tweet.fields"] = ",".join(filtered_fields) + return self + + def add_user_fields(self, user_fields: TweetUserFieldsFilter): + if user_fields: + filtered_fields = [ + name for name, value in user_fields.dict().items() if value is True + ] + if filtered_fields: + self.params["user.fields"] = ",".join(filtered_fields) + return self + + def build(self): + return self.params + + +# Specific Builders +class TweetSearchBuilder: + def __init__(self): + self.params: Dict[str, Any] = {"user_auth": False} + + def add_query(self, query: str): + if query: + self.params["query"] = query + return self + + def add_pagination(self, max_results: int, pagination: str | None): + if max_results: + self.params["max_results"] = max_results + if pagination: + self.params["pagination_token"] = pagination + return self + + def build(self): + return self.params + + +class TweetPostBuilder: + def __init__(self): + self.params: Dict[str, Any] = {"user_auth": False} + + def add_text(self, text: str | None): + if text: + self.params["text"] = text + return self + + def add_media(self, media_ids: list, tagged_user_ids: list): + if media_ids: + self.params["media_ids"] = media_ids + if tagged_user_ids: + self.params["media_tagged_user_ids"] = tagged_user_ids + return self + + def add_deep_link(self, link: str): + if link: + self.params["direct_message_deep_link"] = link + return self + + def add_super_followers(self, for_super_followers: bool): + if for_super_followers: + self.params["for_super_followers_only"] = for_super_followers + return self + + def add_place(self, place_id: str): + if place_id: + self.params["place_id"] = place_id + return self + + def add_poll_options(self, poll_options: list): + if poll_options: + self.params["poll_options"] = poll_options + return self + + def add_poll_duration(self, poll_duration_minutes: int): + if poll_duration_minutes: + self.params["poll_duration_minutes"] = poll_duration_minutes + return self + + def add_quote(self, quote_id: str): + if quote_id: + self.params["quote_tweet_id"] = quote_id + return self + + def add_reply_settings( + self, + exclude_user_ids: list, + reply_to_id: str, + settings: TweetReplySettingsFilter, + ): + if exclude_user_ids: + self.params["exclude_reply_user_ids"] = exclude_user_ids + if reply_to_id: + self.params["in_reply_to_tweet_id"] = reply_to_id + if settings.All_Users: + self.params["reply_settings"] = None + elif settings.Following_Users_Only: + self.params["reply_settings"] = "following" + elif settings.Mentioned_Users_Only: + self.params["reply_settings"] = "mentionedUsers" + return self + + def build(self): + return self.params + + +class TweetGetsBuilder: + def __init__(self): + self.params: Dict[str, Any] = {"user_auth": False} + + def add_id(self, tweet_id: list[str]): + self.params["id"] = tweet_id + return self + + def build(self): + return self.params diff --git a/autogpt_platform/backend/backend/blocks/twitter/_mappers.py b/autogpt_platform/backend/backend/blocks/twitter/_mappers.py new file mode 100644 index 0000000000..a564174ed0 --- /dev/null +++ 
b/autogpt_platform/backend/backend/blocks/twitter/_mappers.py @@ -0,0 +1,234 @@ +# -------------- Tweets ----------------- + +# Tweet Expansions +EXPANSION_FRONTEND_TO_BACKEND_MAPPING = { + "Poll_IDs": "attachments.poll_ids", + "Media_Keys": "attachments.media_keys", + "Author_User_ID": "author_id", + "Edit_History_Tweet_IDs": "edit_history_tweet_ids", + "Mentioned_Usernames": "entities.mentions.username", + "Place_ID": "geo.place_id", + "Reply_To_User_ID": "in_reply_to_user_id", + "Referenced_Tweet_ID": "referenced_tweets.id", + "Referenced_Tweet_Author_ID": "referenced_tweets.id.author_id", +} + + +def get_backend_expansion(frontend_key: str) -> str: + result = EXPANSION_FRONTEND_TO_BACKEND_MAPPING.get(frontend_key) + if result is None: + raise KeyError(f"Invalid expansion key: {frontend_key}") + return result + + +# TweetReplySettings +REPLY_SETTINGS_FRONTEND_TO_BACKEND_MAPPING = { + "Mentioned_Users_Only": "mentionedUsers", + "Following_Users_Only": "following", + "All_Users": "all", +} + + +# TweetUserFields +def get_backend_reply_setting(frontend_key: str) -> str: + result = REPLY_SETTINGS_FRONTEND_TO_BACKEND_MAPPING.get(frontend_key) + if result is None: + raise KeyError(f"Invalid reply setting key: {frontend_key}") + return result + + +USER_FIELDS_FRONTEND_TO_BACKEND_MAPPING = { + "Account_Creation_Date": "created_at", + "User_Bio": "description", + "User_Entities": "entities", + "User_ID": "id", + "User_Location": "location", + "Latest_Tweet_ID": "most_recent_tweet_id", + "Display_Name": "name", + "Pinned_Tweet_ID": "pinned_tweet_id", + "Profile_Picture_URL": "profile_image_url", + "Is_Protected_Account": "protected", + "Account_Statistics": "public_metrics", + "Profile_URL": "url", + "Username": "username", + "Is_Verified": "verified", + "Verification_Type": "verified_type", + "Content_Withholding_Info": "withheld", +} + + +def get_backend_user_field(frontend_key: str) -> str: + result = USER_FIELDS_FRONTEND_TO_BACKEND_MAPPING.get(frontend_key) + if result is None: + raise KeyError(f"Invalid user field key: {frontend_key}") + return result + + +# TweetFields +FIELDS_FRONTEND_TO_BACKEND_MAPPING = { + "Tweet_Attachments": "attachments", + "Author_ID": "author_id", + "Context_Annotations": "context_annotations", + "Conversation_ID": "conversation_id", + "Creation_Time": "created_at", + "Edit_Controls": "edit_controls", + "Tweet_Entities": "entities", + "Geographic_Location": "geo", + "Tweet_ID": "id", + "Reply_To_User_ID": "in_reply_to_user_id", + "Language": "lang", + "Public_Metrics": "public_metrics", + "Sensitive_Content_Flag": "possibly_sensitive", + "Referenced_Tweets": "referenced_tweets", + "Reply_Settings": "reply_settings", + "Tweet_Source": "source", + "Tweet_Text": "text", + "Withheld_Content": "withheld", +} + + +def get_backend_field(frontend_key: str) -> str: + result = FIELDS_FRONTEND_TO_BACKEND_MAPPING.get(frontend_key) + if result is None: + raise KeyError(f"Invalid field key: {frontend_key}") + return result + + +# TweetPollFields +POLL_FIELDS_FRONTEND_TO_BACKEND_MAPPING = { + "Duration_Minutes": "duration_minutes", + "End_DateTime": "end_datetime", + "Poll_ID": "id", + "Poll_Options": "options", + "Voting_Status": "voting_status", +} + + +def get_backend_poll_field(frontend_key: str) -> str: + result = POLL_FIELDS_FRONTEND_TO_BACKEND_MAPPING.get(frontend_key) + if result is None: + raise KeyError(f"Invalid poll field key: {frontend_key}") + return result + + +PLACE_FIELDS_FRONTEND_TO_BACKEND_MAPPING = { + "Contained_Within_Places": "contained_within", + 
"Country": "country", + "Country_Code": "country_code", + "Full_Location_Name": "full_name", + "Geographic_Coordinates": "geo", + "Place_ID": "id", + "Place_Name": "name", + "Place_Type": "place_type", +} + + +def get_backend_place_field(frontend_key: str) -> str: + result = PLACE_FIELDS_FRONTEND_TO_BACKEND_MAPPING.get(frontend_key) + if result is None: + raise KeyError(f"Invalid place field key: {frontend_key}") + return result + + +# TweetMediaFields +MEDIA_FIELDS_FRONTEND_TO_BACKEND_MAPPING = { + "Duration_in_Milliseconds": "duration_ms", + "Height": "height", + "Media_Key": "media_key", + "Preview_Image_URL": "preview_image_url", + "Media_Type": "type", + "Media_URL": "url", + "Width": "width", + "Public_Metrics": "public_metrics", + "Non_Public_Metrics": "non_public_metrics", + "Organic_Metrics": "organic_metrics", + "Promoted_Metrics": "promoted_metrics", + "Alternative_Text": "alt_text", + "Media_Variants": "variants", +} + + +def get_backend_media_field(frontend_key: str) -> str: + result = MEDIA_FIELDS_FRONTEND_TO_BACKEND_MAPPING.get(frontend_key) + if result is None: + raise KeyError(f"Invalid media field key: {frontend_key}") + return result + + +# -------------- Spaces ----------------- + +# SpaceExpansions +EXPANSION_FRONTEND_TO_BACKEND_MAPPING_SPACE = { + "Invited_Users": "invited_user_ids", + "Speakers": "speaker_ids", + "Creator": "creator_id", + "Hosts": "host_ids", + "Topics": "topic_ids", +} + + +def get_backend_space_expansion(frontend_key: str) -> str: + result = EXPANSION_FRONTEND_TO_BACKEND_MAPPING_SPACE.get(frontend_key) + if result is None: + raise KeyError(f"Invalid expansion key: {frontend_key}") + return result + + +# SpaceFields +SPACE_FIELDS_FRONTEND_TO_BACKEND_MAPPING = { + "Space_ID": "id", + "Space_State": "state", + "Creation_Time": "created_at", + "End_Time": "ended_at", + "Host_User_IDs": "host_ids", + "Language": "lang", + "Is_Ticketed": "is_ticketed", + "Invited_User_IDs": "invited_user_ids", + "Participant_Count": "participant_count", + "Subscriber_Count": "subscriber_count", + "Scheduled_Start_Time": "scheduled_start", + "Speaker_User_IDs": "speaker_ids", + "Start_Time": "started_at", + "Space_Title": "title", + "Topic_IDs": "topic_ids", + "Last_Updated_Time": "updated_at", +} + + +def get_backend_space_field(frontend_key: str) -> str: + result = SPACE_FIELDS_FRONTEND_TO_BACKEND_MAPPING.get(frontend_key) + if result is None: + raise KeyError(f"Invalid space field key: {frontend_key}") + return result + + +# -------------- List Expansions ----------------- + +# ListExpansions +LIST_EXPANSION_FRONTEND_TO_BACKEND_MAPPING = {"List_Owner_ID": "owner_id"} + + +def get_backend_list_expansion(frontend_key: str) -> str: + result = LIST_EXPANSION_FRONTEND_TO_BACKEND_MAPPING.get(frontend_key) + if result is None: + raise KeyError(f"Invalid list expansion key: {frontend_key}") + return result + + +LIST_FIELDS_FRONTEND_TO_BACKEND_MAPPING = { + "List_ID": "id", + "List_Name": "name", + "Creation_Date": "created_at", + "Description": "description", + "Follower_Count": "follower_count", + "Member_Count": "member_count", + "Is_Private": "private", + "Owner_ID": "owner_id", +} + + +def get_backend_list_field(frontend_key: str) -> str: + result = LIST_FIELDS_FRONTEND_TO_BACKEND_MAPPING.get(frontend_key) + if result is None: + raise KeyError(f"Invalid list field key: {frontend_key}") + return result diff --git a/autogpt_platform/backend/backend/blocks/twitter/_serializer.py b/autogpt_platform/backend/backend/blocks/twitter/_serializer.py new file mode 100644 index 
0000000000..906c524456 --- /dev/null +++ b/autogpt_platform/backend/backend/blocks/twitter/_serializer.py @@ -0,0 +1,76 @@ +from typing import Any, Dict, List + + +class BaseSerializer: + @staticmethod + def _serialize_value(value: Any) -> Any: + """Helper method to serialize individual values""" + if hasattr(value, "data"): + return value.data + return value + + +class IncludesSerializer(BaseSerializer): + @classmethod + def serialize(cls, includes: Dict[str, Any]) -> Dict[str, Any]: + """Serializes the includes dictionary""" + if not includes: + return {} + + serialized_includes = {} + for key, value in includes.items(): + if isinstance(value, list): + serialized_includes[key] = [ + cls._serialize_value(item) for item in value + ] + else: + serialized_includes[key] = cls._serialize_value(value) + + return serialized_includes + + +class ResponseDataSerializer(BaseSerializer): + @classmethod + def serialize_dict(cls, item: Dict[str, Any]) -> Dict[str, Any]: + """Serializes a single dictionary item""" + serialized_item = {} + + if hasattr(item, "__dict__"): + items = item.__dict__.items() + else: + items = item.items() + + for key, value in items: + if isinstance(value, list): + serialized_item[key] = [ + cls._serialize_value(sub_item) for sub_item in value + ] + else: + serialized_item[key] = cls._serialize_value(value) + + return serialized_item + + @classmethod + def serialize_list(cls, data: List[Dict[str, Any]]) -> List[Dict[str, Any]]: + """Serializes a list of dictionary items""" + return [cls.serialize_dict(item) for item in data] + + +class ResponseSerializer: + @classmethod + def serialize(cls, response) -> Dict[str, Any]: + """Main serializer that handles both data and includes""" + result = {"data": None, "included": {}} + + # Handle response.data + if response.data: + if isinstance(response.data, list): + result["data"] = ResponseDataSerializer.serialize_list(response.data) + else: + result["data"] = ResponseDataSerializer.serialize_dict(response.data) + + # Handle includes + if hasattr(response, "includes") and response.includes: + result["included"] = IncludesSerializer.serialize(response.includes) + + return result diff --git a/autogpt_platform/backend/backend/blocks/twitter/_types.py b/autogpt_platform/backend/backend/blocks/twitter/_types.py new file mode 100644 index 0000000000..2b404e4f56 --- /dev/null +++ b/autogpt_platform/backend/backend/blocks/twitter/_types.py @@ -0,0 +1,443 @@ +from datetime import datetime +from enum import Enum + +from pydantic import BaseModel + +from backend.data.block import BlockSchema +from backend.data.model import SchemaField + +# -------------- Tweets ----------------- + + +class TweetReplySettingsFilter(BaseModel): + Mentioned_Users_Only: bool = False + Following_Users_Only: bool = False + All_Users: bool = False + + +class TweetUserFieldsFilter(BaseModel): + Account_Creation_Date: bool = False + User_Bio: bool = False + User_Entities: bool = False + User_ID: bool = False + User_Location: bool = False + Latest_Tweet_ID: bool = False + Display_Name: bool = False + Pinned_Tweet_ID: bool = False + Profile_Picture_URL: bool = False + Is_Protected_Account: bool = False + Account_Statistics: bool = False + Profile_URL: bool = False + Username: bool = False + Is_Verified: bool = False + Verification_Type: bool = False + Content_Withholding_Info: bool = False + + +class TweetFieldsFilter(BaseModel): + Tweet_Attachments: bool = False + Author_ID: bool = False + Context_Annotations: bool = False + Conversation_ID: bool = False + Creation_Time: 
bool = False + Edit_Controls: bool = False + Tweet_Entities: bool = False + Geographic_Location: bool = False + Tweet_ID: bool = False + Reply_To_User_ID: bool = False + Language: bool = False + Public_Metrics: bool = False + Sensitive_Content_Flag: bool = False + Referenced_Tweets: bool = False + Reply_Settings: bool = False + Tweet_Source: bool = False + Tweet_Text: bool = False + Withheld_Content: bool = False + + +class PersonalTweetFieldsFilter(BaseModel): + attachments: bool = False + author_id: bool = False + context_annotations: bool = False + conversation_id: bool = False + created_at: bool = False + edit_controls: bool = False + entities: bool = False + geo: bool = False + id: bool = False + in_reply_to_user_id: bool = False + lang: bool = False + non_public_metrics: bool = False + public_metrics: bool = False + organic_metrics: bool = False + promoted_metrics: bool = False + possibly_sensitive: bool = False + referenced_tweets: bool = False + reply_settings: bool = False + source: bool = False + text: bool = False + withheld: bool = False + + +class TweetPollFieldsFilter(BaseModel): + Duration_Minutes: bool = False + End_DateTime: bool = False + Poll_ID: bool = False + Poll_Options: bool = False + Voting_Status: bool = False + + +class TweetPlaceFieldsFilter(BaseModel): + Contained_Within_Places: bool = False + Country: bool = False + Country_Code: bool = False + Full_Location_Name: bool = False + Geographic_Coordinates: bool = False + Place_ID: bool = False + Place_Name: bool = False + Place_Type: bool = False + + +class TweetMediaFieldsFilter(BaseModel): + Duration_in_Milliseconds: bool = False + Height: bool = False + Media_Key: bool = False + Preview_Image_URL: bool = False + Media_Type: bool = False + Media_URL: bool = False + Width: bool = False + Public_Metrics: bool = False + Non_Public_Metrics: bool = False + Organic_Metrics: bool = False + Promoted_Metrics: bool = False + Alternative_Text: bool = False + Media_Variants: bool = False + + +class ExpansionFilter(BaseModel): + Poll_IDs: bool = False + Media_Keys: bool = False + Author_User_ID: bool = False + Edit_History_Tweet_IDs: bool = False + Mentioned_Usernames: bool = False + Place_ID: bool = False + Reply_To_User_ID: bool = False + Referenced_Tweet_ID: bool = False + Referenced_Tweet_Author_ID: bool = False + + +class TweetExcludesFilter(BaseModel): + retweets: bool = False + replies: bool = False + + +# -------------- Users ----------------- + + +class UserExpansionsFilter(BaseModel): + pinned_tweet_id: bool = False + + +# -------------- DM's' ----------------- + + +class DMEventFieldFilter(BaseModel): + id: bool = False + text: bool = False + event_type: bool = False + created_at: bool = False + dm_conversation_id: bool = False + sender_id: bool = False + participant_ids: bool = False + referenced_tweets: bool = False + attachments: bool = False + + +class DMEventTypeFilter(BaseModel): + MessageCreate: bool = False + ParticipantsJoin: bool = False + ParticipantsLeave: bool = False + + +class DMEventExpansionFilter(BaseModel): + attachments_media_keys: bool = False + referenced_tweets_id: bool = False + sender_id: bool = False + participant_ids: bool = False + + +class DMMediaFieldFilter(BaseModel): + duration_ms: bool = False + height: bool = False + media_key: bool = False + preview_image_url: bool = False + type: bool = False + url: bool = False + width: bool = False + public_metrics: bool = False + alt_text: bool = False + variants: bool = False + + +class DMTweetFieldFilter(BaseModel): + attachments: bool = 
False + author_id: bool = False + context_annotations: bool = False + conversation_id: bool = False + created_at: bool = False + edit_controls: bool = False + entities: bool = False + geo: bool = False + id: bool = False + in_reply_to_user_id: bool = False + lang: bool = False + public_metrics: bool = False + possibly_sensitive: bool = False + referenced_tweets: bool = False + reply_settings: bool = False + source: bool = False + text: bool = False + withheld: bool = False + + +# -------------- Spaces ----------------- + + +class SpaceExpansionsFilter(BaseModel): + Invited_Users: bool = False + Speakers: bool = False + Creator: bool = False + Hosts: bool = False + Topics: bool = False + + +class SpaceFieldsFilter(BaseModel): + Space_ID: bool = False + Space_State: bool = False + Creation_Time: bool = False + End_Time: bool = False + Host_User_IDs: bool = False + Language: bool = False + Is_Ticketed: bool = False + Invited_User_IDs: bool = False + Participant_Count: bool = False + Subscriber_Count: bool = False + Scheduled_Start_Time: bool = False + Speaker_User_IDs: bool = False + Start_Time: bool = False + Space_Title: bool = False + Topic_IDs: bool = False + Last_Updated_Time: bool = False + + +class SpaceStatesFilter(str, Enum): + live = "live" + scheduled = "scheduled" + all = "all" + + +# -------------- List Expansions ----------------- + + +class ListExpansionsFilter(BaseModel): + List_Owner_ID: bool = False + + +class ListFieldsFilter(BaseModel): + List_ID: bool = False + List_Name: bool = False + Creation_Date: bool = False + Description: bool = False + Follower_Count: bool = False + Member_Count: bool = False + Is_Private: bool = False + Owner_ID: bool = False + + +# --------- [Input Types] ------------- +class TweetExpansionInputs(BlockSchema): + + expansions: ExpansionFilter | None = SchemaField( + description="Choose what extra information you want to get with your tweets. For example:\n- Select 'Media_Keys' to get media details\n- Select 'Author_User_ID' to get user information\n- Select 'Place_ID' to get location details", + placeholder="Pick the extra information you want to see", + default=None, + advanced=True, + ) + + media_fields: TweetMediaFieldsFilter | None = SchemaField( + description="Select what media information you want to see (images, videos, etc). To use this, you must first select 'Media_Keys' in the expansions above.", + placeholder="Choose what media details you want to see", + default=None, + advanced=True, + ) + + place_fields: TweetPlaceFieldsFilter | None = SchemaField( + description="Select what location information you want to see (country, coordinates, etc). To use this, you must first select 'Place_ID' in the expansions above.", + placeholder="Choose what location details you want to see", + default=None, + advanced=True, + ) + + poll_fields: TweetPollFieldsFilter | None = SchemaField( + description="Select what poll information you want to see (options, voting status, etc). To use this, you must first select 'Poll_IDs' in the expansions above.", + placeholder="Choose what poll details you want to see", + default=None, + advanced=True, + ) + + tweet_fields: TweetFieldsFilter | None = SchemaField( + description="Select what tweet information you want to see. 
For referenced tweets (like retweets), select 'Referenced_Tweet_ID' in the expansions above.", + placeholder="Choose what tweet details you want to see", + default=None, + advanced=True, + ) + + user_fields: TweetUserFieldsFilter | None = SchemaField( + description="Select what user information you want to see. To use this, you must first select one of these in expansions above:\n- 'Author_User_ID' for tweet authors\n- 'Mentioned_Usernames' for mentioned users\n- 'Reply_To_User_ID' for users being replied to\n- 'Referenced_Tweet_Author_ID' for authors of referenced tweets", + placeholder="Choose what user details you want to see", + default=None, + advanced=True, + ) + + +class DMEventExpansionInputs(BlockSchema): + expansions: DMEventExpansionFilter | None = SchemaField( + description="Select expansions to include related data objects in the 'includes' section.", + placeholder="Enter expansions", + default=None, + advanced=True, + ) + + event_types: DMEventTypeFilter | None = SchemaField( + description="Select DM event types to include in the response.", + placeholder="Enter event types", + default=None, + advanced=True, + ) + + media_fields: DMMediaFieldFilter | None = SchemaField( + description="Select media fields to include in the response (requires expansions=attachments.media_keys).", + placeholder="Enter media fields", + default=None, + advanced=True, + ) + + tweet_fields: DMTweetFieldFilter | None = SchemaField( + description="Select tweet fields to include in the response (requires expansions=referenced_tweets.id).", + placeholder="Enter tweet fields", + default=None, + advanced=True, + ) + + user_fields: TweetUserFieldsFilter | None = SchemaField( + description="Select user fields to include in the response (requires expansions=sender_id or participant_ids).", + placeholder="Enter user fields", + default=None, + advanced=True, + ) + + +class UserExpansionInputs(BlockSchema): + expansions: UserExpansionsFilter | None = SchemaField( + description="Choose what extra information you want to get with user data. Currently only 'pinned_tweet_id' is available to see a user's pinned tweet.", + placeholder="Select extra user information to include", + default=None, + advanced=True, + ) + + tweet_fields: TweetFieldsFilter | None = SchemaField( + description="Select what tweet information you want to see in pinned tweets. 
This only works if you select 'pinned_tweet_id' in expansions above.", + placeholder="Choose what details to see in pinned tweets", + default=None, + advanced=True, + ) + + user_fields: TweetUserFieldsFilter | None = SchemaField( + description="Select what user information you want to see, like username, bio, profile picture, etc.", + placeholder="Choose what user details you want to see", + default=None, + advanced=True, + ) + + +class SpaceExpansionInputs(BlockSchema): + expansions: SpaceExpansionsFilter | None = SchemaField( + description="Choose additional information you want to get with your Twitter Spaces:\n- Select 'Invited_Users' to see who was invited\n- Select 'Speakers' to see who can speak\n- Select 'Creator' to get details about who made the Space\n- Select 'Hosts' to see who's hosting\n- Select 'Topics' to see Space topics", + placeholder="Pick what extra information you want to see about the Space", + default=None, + advanced=True, + ) + + space_fields: SpaceFieldsFilter | None = SchemaField( + description="Choose what Space details you want to see, such as:\n- Title\n- Start/End times\n- Number of participants\n- Language\n- State (live/scheduled)\n- And more", + placeholder="Choose what Space information you want to get", + default=SpaceFieldsFilter(Space_Title=True, Host_User_IDs=True), + advanced=True, + ) + + user_fields: TweetUserFieldsFilter | None = SchemaField( + description="Choose what user information you want to see. This works when you select any of these in expansions above:\n- 'Creator' for Space creator details\n- 'Hosts' for host information\n- 'Speakers' for speaker details\n- 'Invited_Users' for invited user information", + placeholder="Pick what details you want to see about the users", + default=None, + advanced=True, + ) + + +class ListExpansionInputs(BlockSchema): + expansions: ListExpansionsFilter | None = SchemaField( + description="Choose what extra information you want to get with your Twitter Lists:\n- Select 'List_Owner_ID' to get details about who owns the list\n\nThis will let you see more details about the list owner when you also select user fields below.", + placeholder="Pick what extra list information you want to see", + default=ListExpansionsFilter(List_Owner_ID=True), + advanced=True, + ) + + user_fields: TweetUserFieldsFilter | None = SchemaField( + description="Choose what information you want to see about list owners. 
This only works when you select 'List_Owner_ID' in expansions above.\n\nYou can see things like:\n- Their username\n- Profile picture\n- Account details\n- And more", + placeholder="Select what details you want to see about list owners", + default=TweetUserFieldsFilter(User_ID=True, Username=True), + advanced=True, + ) + + list_fields: ListFieldsFilter | None = SchemaField( + description="Choose what information you want to see about the Twitter Lists themselves, such as:\n- List name\n- Description\n- Number of followers\n- Number of members\n- Whether it's private\n- Creation date\n- And more", + placeholder="Pick what list details you want to see", + default=ListFieldsFilter(Owner_ID=True), + advanced=True, + ) + + +class TweetTimeWindowInputs(BlockSchema): + start_time: datetime | None = SchemaField( + description="Start time in YYYY-MM-DDTHH:mm:ssZ format", + placeholder="Enter start time", + default=None, + advanced=False, + ) + + end_time: datetime | None = SchemaField( + description="End time in YYYY-MM-DDTHH:mm:ssZ format", + placeholder="Enter end time", + default=None, + advanced=False, + ) + + since_id: str | None = SchemaField( + description="Returns results with Tweet ID greater than this (more recent than), we give priority to since_id over start_time", + placeholder="Enter since ID", + default=None, + advanced=True, + ) + + until_id: str | None = SchemaField( + description="Returns results with Tweet ID less than this (that is, older than), and used with since_id", + placeholder="Enter until ID", + default=None, + advanced=True, + ) + + sort_order: str | None = SchemaField( + description="Order of returned tweets (recency or relevancy)", + placeholder="Enter sort order", + default=None, + advanced=True, + ) diff --git a/autogpt_platform/backend/backend/blocks/twitter/direct_message/direct_message_lookup.py b/autogpt_platform/backend/backend/blocks/twitter/direct_message/direct_message_lookup.py new file mode 100644 index 0000000000..56a62d166e --- /dev/null +++ b/autogpt_platform/backend/backend/blocks/twitter/direct_message/direct_message_lookup.py @@ -0,0 +1,201 @@ +# Todo : Add new Type support + +# from typing import cast +# import tweepy +# from tweepy.client import Response + +# from backend.blocks.twitter._serializer import IncludesSerializer, ResponseDataSerializer +# from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema +# from backend.data.model import SchemaField +# from backend.blocks.twitter._builders import DMExpansionsBuilder +# from backend.blocks.twitter._types import DMEventExpansion, DMEventExpansionInputs, DMEventType, DMMediaField, DMTweetField, TweetUserFields +# from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception +# from backend.blocks.twitter._auth import ( +# TEST_CREDENTIALS, +# TEST_CREDENTIALS_INPUT, +# TwitterCredentials, +# TwitterCredentialsField, +# TwitterCredentialsInput, +# ) + +# Require Pro or Enterprise plan [Manual Testing Required] +# class TwitterGetDMEventsBlock(Block): +# """ +# Gets a list of Direct Message events for the authenticated user +# """ + +# class Input(DMEventExpansionInputs): +# credentials: TwitterCredentialsInput = TwitterCredentialsField( +# ["dm.read", "offline.access", "user.read", "tweet.read"] +# ) + +# dm_conversation_id: str = SchemaField( +# description="The ID of the Direct Message conversation", +# placeholder="Enter conversation ID", +# required=True +# ) + +# max_results: int = SchemaField( +# description="Maximum number of results to return (1-100)", +# 
placeholder="Enter max results", +# advanced=True, +# default=10, +# ) + +# pagination_token: str = SchemaField( +# description="Token for pagination", +# placeholder="Enter pagination token", +# advanced=True, +# default="" +# ) + +# class Output(BlockSchema): +# # Common outputs +# event_ids: list[str] = SchemaField(description="DM Event IDs") +# event_texts: list[str] = SchemaField(description="DM Event text contents") +# event_types: list[str] = SchemaField(description="Types of DM events") +# next_token: str = SchemaField(description="Token for next page of results") + +# # Complete outputs +# data: list[dict] = SchemaField(description="Complete DM events data") +# included: dict = SchemaField(description="Additional data requested via expansions") +# meta: dict = SchemaField(description="Metadata about the response") +# error: str = SchemaField(description="Error message if request failed") + +# def __init__(self): +# super().__init__( +# id="dc37a6d4-a62e-11ef-a3a5-03061375737b", +# description="This block retrieves Direct Message events for the authenticated user.", +# categories={BlockCategory.SOCIAL}, +# input_schema=TwitterGetDMEventsBlock.Input, +# output_schema=TwitterGetDMEventsBlock.Output, +# test_input={ +# "dm_conversation_id": "1234567890", +# "max_results": 10, +# "credentials": TEST_CREDENTIALS_INPUT, +# "expansions": [], +# "event_types": [], +# "media_fields": [], +# "tweet_fields": [], +# "user_fields": [] +# }, +# test_credentials=TEST_CREDENTIALS, +# test_output=[ +# ("event_ids", ["1346889436626259968"]), +# ("event_texts", ["Hello just you..."]), +# ("event_types", ["MessageCreate"]), +# ("next_token", None), +# ("data", [{"id": "1346889436626259968", "text": "Hello just you...", "event_type": "MessageCreate"}]), +# ("included", {}), +# ("meta", {}), +# ("error", "") +# ], +# test_mock={ +# "get_dm_events": lambda *args, **kwargs: ( +# [{"id": "1346889436626259968", "text": "Hello just you...", "event_type": "MessageCreate"}], +# {}, +# {}, +# ["1346889436626259968"], +# ["Hello just you..."], +# ["MessageCreate"], +# None +# ) +# } +# ) + +# @staticmethod +# def get_dm_events( +# credentials: TwitterCredentials, +# dm_conversation_id: str, +# max_results: int, +# pagination_token: str, +# expansions: list[DMEventExpansion], +# event_types: list[DMEventType], +# media_fields: list[DMMediaField], +# tweet_fields: list[DMTweetField], +# user_fields: list[TweetUserFields] +# ): +# try: +# client = tweepy.Client( +# bearer_token=credentials.access_token.get_secret_value() +# ) + +# params = { +# "dm_conversation_id": dm_conversation_id, +# "max_results": max_results, +# "pagination_token": None if pagination_token == "" else pagination_token, +# "user_auth": False +# } + +# params = (DMExpansionsBuilder(params) +# .add_expansions(expansions) +# .add_event_types(event_types) +# .add_media_fields(media_fields) +# .add_tweet_fields(tweet_fields) +# .add_user_fields(user_fields) +# .build()) + +# response = cast(Response, client.get_direct_message_events(**params)) + +# meta = {} +# event_ids = [] +# event_texts = [] +# event_types = [] +# next_token = None + +# if response.meta: +# meta = response.meta +# next_token = meta.get("next_token") + +# included = IncludesSerializer.serialize(response.includes) +# data = ResponseDataSerializer.serialize_list(response.data) + +# if response.data: +# event_ids = [str(item.id) for item in response.data] +# event_texts = [item.text if hasattr(item, "text") else None for item in response.data] +# event_types = [item.event_type for 
item in response.data] + +# return data, included, meta, event_ids, event_texts, event_types, next_token + +# raise Exception("No DM events found") + +# except tweepy.TweepyException: +# raise + +# def run( +# self, +# input_data: Input, +# *, +# credentials: TwitterCredentials, +# **kwargs, +# ) -> BlockOutput: +# try: +# event_data, included, meta, event_ids, event_texts, event_types, next_token = self.get_dm_events( +# credentials, +# input_data.dm_conversation_id, +# input_data.max_results, +# input_data.pagination_token, +# input_data.expansions, +# input_data.event_types, +# input_data.media_fields, +# input_data.tweet_fields, +# input_data.user_fields +# ) + +# if event_ids: +# yield "event_ids", event_ids +# if event_texts: +# yield "event_texts", event_texts +# if event_types: +# yield "event_types", event_types +# if next_token: +# yield "next_token", next_token +# if event_data: +# yield "data", event_data +# if included: +# yield "included", included +# if meta: +# yield "meta", meta + +# except Exception as e: +# yield "error", handle_tweepy_exception(e) diff --git a/autogpt_platform/backend/backend/blocks/twitter/direct_message/manage_direct_message.py b/autogpt_platform/backend/backend/blocks/twitter/direct_message/manage_direct_message.py new file mode 100644 index 0000000000..f25331db97 --- /dev/null +++ b/autogpt_platform/backend/backend/blocks/twitter/direct_message/manage_direct_message.py @@ -0,0 +1,260 @@ +# Todo : Add new Type support + +# from typing import cast + +# import tweepy +# from tweepy.client import Response + +# from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema +# from backend.data.model import SchemaField +# from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception +# from backend.blocks.twitter._auth import ( +# TEST_CREDENTIALS, +# TEST_CREDENTIALS_INPUT, +# TwitterCredentials, +# TwitterCredentialsField, +# TwitterCredentialsInput, +# ) + +# Pro and Enterprise plan [Manual Testing Required] +# class TwitterSendDirectMessageBlock(Block): +# """ +# Sends a direct message to a Twitter user +# """ + +# class Input(BlockSchema): +# credentials: TwitterCredentialsInput = TwitterCredentialsField( +# ["offline.access", "direct_messages.write"] +# ) + +# participant_id: str = SchemaField( +# description="The User ID of the account to send DM to", +# placeholder="Enter recipient user ID", +# default="", +# advanced=False +# ) + +# dm_conversation_id: str = SchemaField( +# description="The conversation ID to send message to", +# placeholder="Enter conversation ID", +# default="", +# advanced=False +# ) + +# text: str = SchemaField( +# description="Text of the Direct Message (up to 10,000 characters)", +# placeholder="Enter message text", +# default="", +# advanced=False +# ) + +# media_id: str = SchemaField( +# description="Media ID to attach to the message", +# placeholder="Enter media ID", +# default="" +# ) + +# class Output(BlockSchema): +# dm_event_id: str = SchemaField(description="ID of the sent direct message") +# dm_conversation_id_: str = SchemaField(description="ID of the conversation") +# error: str = SchemaField(description="Error message if sending failed") + +# def __init__(self): +# super().__init__( +# id="f32f2786-a62e-11ef-a93d-a3ef199dde7f", +# description="This block sends a direct message to a specified Twitter user.", +# categories={BlockCategory.SOCIAL}, +# input_schema=TwitterSendDirectMessageBlock.Input, +# output_schema=TwitterSendDirectMessageBlock.Output, +# test_input={ +# 
"participant_id": "783214", +# "dm_conversation_id": "", +# "text": "Hello from Twitter API", +# "media_id": "", +# "credentials": TEST_CREDENTIALS_INPUT +# }, +# test_credentials=TEST_CREDENTIALS, +# test_output=[ +# ("dm_event_id", "0987654321"), +# ("dm_conversation_id_", "1234567890"), +# ("error", "") +# ], +# test_mock={ +# "send_direct_message": lambda *args, **kwargs: ( +# "0987654321", +# "1234567890" +# ) +# }, +# ) + +# @staticmethod +# def send_direct_message( +# credentials: TwitterCredentials, +# participant_id: str, +# dm_conversation_id: str, +# text: str, +# media_id: str +# ): +# try: +# client = tweepy.Client( +# bearer_token=credentials.access_token.get_secret_value() +# ) + +# response = cast( +# Response, +# client.create_direct_message( +# participant_id=None if participant_id == "" else participant_id, +# dm_conversation_id=None if dm_conversation_id == "" else dm_conversation_id, +# text=None if text == "" else text, +# media_id=None if media_id == "" else media_id, +# user_auth=False +# ) +# ) + +# if not response.data: +# raise Exception("Failed to send direct message") + +# return response.data["dm_event_id"], response.data["dm_conversation_id"] + +# except tweepy.TweepyException: +# raise +# except Exception as e: +# print(f"Unexpected error: {str(e)}") +# raise + +# def run( +# self, +# input_data: Input, +# *, +# credentials: TwitterCredentials, +# **kwargs, +# ) -> BlockOutput: +# try: +# dm_event_id, dm_conversation_id = self.send_direct_message( +# credentials, +# input_data.participant_id, +# input_data.dm_conversation_id, +# input_data.text, +# input_data.media_id +# ) +# yield "dm_event_id", dm_event_id +# yield "dm_conversation_id", dm_conversation_id + +# except Exception as e: +# yield "error", handle_tweepy_exception(e) + +# class TwitterCreateDMConversationBlock(Block): +# """ +# Creates a new group direct message conversation on Twitter +# """ + +# class Input(BlockSchema): +# credentials: TwitterCredentialsInput = TwitterCredentialsField( +# ["offline.access", "dm.write","dm.read","tweet.read","user.read"] +# ) + +# participant_ids: list[str] = SchemaField( +# description="Array of User IDs to create conversation with (max 50)", +# placeholder="Enter participant user IDs", +# default=[], +# advanced=False +# ) + +# text: str = SchemaField( +# description="Text of the Direct Message (up to 10,000 characters)", +# placeholder="Enter message text", +# default="", +# advanced=False +# ) + +# media_id: str = SchemaField( +# description="Media ID to attach to the message", +# placeholder="Enter media ID", +# default="", +# advanced=False +# ) + +# class Output(BlockSchema): +# dm_event_id: str = SchemaField(description="ID of the sent direct message") +# dm_conversation_id: str = SchemaField(description="ID of the conversation") +# error: str = SchemaField(description="Error message if sending failed") + +# def __init__(self): +# super().__init__( +# id="ec11cabc-a62e-11ef-8c0e-3fe37ba2ec92", +# description="This block creates a new group DM conversation with specified Twitter users.", +# categories={BlockCategory.SOCIAL}, +# input_schema=TwitterCreateDMConversationBlock.Input, +# output_schema=TwitterCreateDMConversationBlock.Output, +# test_input={ +# "participant_ids": ["783214", "2244994945"], +# "text": "Hello from Twitter API", +# "media_id": "", +# "credentials": TEST_CREDENTIALS_INPUT +# }, +# test_credentials=TEST_CREDENTIALS, +# test_output=[ +# ("dm_event_id", "0987654321"), +# ("dm_conversation_id", "1234567890"), +# ("error", "") +# ], +# 
test_mock={ +# "create_dm_conversation": lambda *args, **kwargs: ( +# "0987654321", +# "1234567890" +# ) +# }, +# ) + +# @staticmethod +# def create_dm_conversation( +# credentials: TwitterCredentials, +# participant_ids: list[str], +# text: str, +# media_id: str +# ): +# try: +# client = tweepy.Client( +# bearer_token=credentials.access_token.get_secret_value() +# ) + +# response = cast( +# Response, +# client.create_direct_message_conversation( +# participant_ids=participant_ids, +# text=None if text == "" else text, +# media_id=None if media_id == "" else media_id, +# user_auth=False +# ) +# ) + +# if not response.data: +# raise Exception("Failed to create DM conversation") + +# return response.data["dm_event_id"], response.data["dm_conversation_id"] + +# except tweepy.TweepyException: +# raise +# except Exception as e: +# print(f"Unexpected error: {str(e)}") +# raise + +# def run( +# self, +# input_data: Input, +# *, +# credentials: TwitterCredentials, +# **kwargs, +# ) -> BlockOutput: +# try: +# dm_event_id, dm_conversation_id = self.create_dm_conversation( +# credentials, +# input_data.participant_ids, +# input_data.text, +# input_data.media_id +# ) +# yield "dm_event_id", dm_event_id +# yield "dm_conversation_id", dm_conversation_id + +# except Exception as e: +# yield "error", handle_tweepy_exception(e) diff --git a/autogpt_platform/backend/backend/blocks/twitter/lists/list_follows.py b/autogpt_platform/backend/backend/blocks/twitter/lists/list_follows.py new file mode 100644 index 0000000000..99a0108296 --- /dev/null +++ b/autogpt_platform/backend/backend/blocks/twitter/lists/list_follows.py @@ -0,0 +1,470 @@ +# from typing import cast +import tweepy + +from backend.blocks.twitter._auth import ( + TEST_CREDENTIALS, + TEST_CREDENTIALS_INPUT, + TwitterCredentials, + TwitterCredentialsField, + TwitterCredentialsInput, +) + +# from backend.blocks.twitter._builders import UserExpansionsBuilder +# from backend.blocks.twitter._types import TweetFields, TweetUserFields, UserExpansionInputs, UserExpansions +from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception +from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema +from backend.data.model import SchemaField + +# from tweepy.client import Response + + +class TwitterUnfollowListBlock(Block): + """ + Unfollows a Twitter list for the authenticated user + """ + + class Input(BlockSchema): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["follows.write", "offline.access"] + ) + + list_id: str = SchemaField( + description="The ID of the List to unfollow", + placeholder="Enter list ID", + ) + + class Output(BlockSchema): + success: bool = SchemaField(description="Whether the unfollow was successful") + error: str = SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="1f43310a-a62f-11ef-8276-2b06a1bbae1a", + description="This block unfollows a specified Twitter list for the authenticated user.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterUnfollowListBlock.Input, + output_schema=TwitterUnfollowListBlock.Output, + test_input={"list_id": "123456789", "credentials": TEST_CREDENTIALS_INPUT}, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("success", True), + ], + test_mock={"unfollow_list": lambda *args, **kwargs: True}, + ) + + @staticmethod + def unfollow_list(credentials: TwitterCredentials, list_id: str): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + 
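+            # user_auth=False: Tweepy authenticates this call with the user's OAuth 2.0 access token passed as bearer_token above, instead of an OAuth 1.0a user context. 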
+ client.unfollow_list(list_id=list_id, user_auth=False) + + return True + + except tweepy.TweepyException: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + success = self.unfollow_list(credentials, input_data.list_id) + yield "success", success + except Exception as e: + yield "error", handle_tweepy_exception(e) + + +class TwitterFollowListBlock(Block): + """ + Follows a Twitter list for the authenticated user + """ + + class Input(BlockSchema): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["tweet.read", "users.read", "list.write", "offline.access"] + ) + + list_id: str = SchemaField( + description="The ID of the List to follow", + placeholder="Enter list ID", + ) + + class Output(BlockSchema): + success: bool = SchemaField(description="Whether the follow was successful") + error: str = SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="03d8acf6-a62f-11ef-b17f-b72b04a09e79", + description="This block follows a specified Twitter list for the authenticated user.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterFollowListBlock.Input, + output_schema=TwitterFollowListBlock.Output, + test_input={"list_id": "123456789", "credentials": TEST_CREDENTIALS_INPUT}, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("success", True), + ], + test_mock={"follow_list": lambda *args, **kwargs: True}, + ) + + @staticmethod + def follow_list(credentials: TwitterCredentials, list_id: str): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + client.follow_list(list_id=list_id, user_auth=False) + + return True + + except tweepy.TweepyException: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + success = self.follow_list(credentials, input_data.list_id) + yield "success", success + except Exception as e: + yield "error", handle_tweepy_exception(e) + + +# Enterprise Level [Need to do Manual testing], There is a high possibility that we might get error in this +# Needs Type Input in this + +# class TwitterListGetFollowersBlock(Block): +# """ +# Gets followers of a specified Twitter list +# """ + +# class Input(UserExpansionInputs): +# credentials: TwitterCredentialsInput = TwitterCredentialsField( +# ["tweet.read","users.read", "list.read", "offline.access"] +# ) + +# list_id: str = SchemaField( +# description="The ID of the List to get followers for", +# placeholder="Enter list ID", +# required=True +# ) + +# max_results: int = SchemaField( +# description="Max number of results per page (1-100)", +# placeholder="Enter max results", +# default=10, +# advanced=True, +# ) + +# pagination_token: str = SchemaField( +# description="Token for pagination", +# placeholder="Enter pagination token", +# default="", +# advanced=True, +# ) + +# class Output(BlockSchema): +# user_ids: list[str] = SchemaField(description="List of user IDs of followers") +# usernames: list[str] = SchemaField(description="List of usernames of followers") +# next_token: str = SchemaField(description="Token for next page of results") +# data: list[dict] = SchemaField(description="Complete follower data") +# included: dict = SchemaField(description="Additional data requested via expansions") +# meta: dict = SchemaField(description="Metadata about the response") +# error: str = SchemaField(description="Error message if the request failed") + 
+# def __init__(self): +# super().__init__( +# id="16b289b4-a62f-11ef-95d4-bb29b849eb99", +# description="This block retrieves followers of a specified Twitter list.", +# categories={BlockCategory.SOCIAL}, +# input_schema=TwitterListGetFollowersBlock.Input, +# output_schema=TwitterListGetFollowersBlock.Output, +# test_input={ +# "list_id": "123456789", +# "max_results": 10, +# "pagination_token": None, +# "credentials": TEST_CREDENTIALS_INPUT, +# "expansions": [], +# "tweet_fields": [], +# "user_fields": [] +# }, +# test_credentials=TEST_CREDENTIALS, +# test_output=[ +# ("user_ids", ["2244994945"]), +# ("usernames", ["testuser"]), +# ("next_token", None), +# ("data", {"followers": [{"id": "2244994945", "username": "testuser"}]}), +# ("included", {}), +# ("meta", {}), +# ("error", "") +# ], +# test_mock={ +# "get_list_followers": lambda *args, **kwargs: ({ +# "followers": [{"id": "2244994945", "username": "testuser"}] +# }, {}, {}, ["2244994945"], ["testuser"], None) +# } +# ) + +# @staticmethod +# def get_list_followers( +# credentials: TwitterCredentials, +# list_id: str, +# max_results: int, +# pagination_token: str, +# expansions: list[UserExpansions], +# tweet_fields: list[TweetFields], +# user_fields: list[TweetUserFields] +# ): +# try: +# client = tweepy.Client( +# bearer_token=credentials.access_token.get_secret_value(), +# ) + +# params = { +# "id": list_id, +# "max_results": max_results, +# "pagination_token": None if pagination_token == "" else pagination_token, +# "user_auth": False +# } + +# params = (UserExpansionsBuilder(params) +# .add_expansions(expansions) +# .add_tweet_fields(tweet_fields) +# .add_user_fields(user_fields) +# .build()) + +# response = cast( +# Response, +# client.get_list_followers(**params) +# ) + +# meta = {} +# user_ids = [] +# usernames = [] +# next_token = None + +# if response.meta: +# meta = response.meta +# next_token = meta.get("next_token") + +# included = IncludesSerializer.serialize(response.includes) +# data = ResponseDataSerializer.serialize_list(response.data) + +# if response.data: +# user_ids = [str(item.id) for item in response.data] +# usernames = [item.username for item in response.data] + +# return data, included, meta, user_ids, usernames, next_token + +# raise Exception("No followers found") + +# except tweepy.TweepyException: +# raise + +# def run( +# self, +# input_data: Input, +# *, +# credentials: TwitterCredentials, +# **kwargs, +# ) -> BlockOutput: +# try: +# followers_data, included, meta, user_ids, usernames, next_token = self.get_list_followers( +# credentials, +# input_data.list_id, +# input_data.max_results, +# input_data.pagination_token, +# input_data.expansions, +# input_data.tweet_fields, +# input_data.user_fields +# ) + +# if user_ids: +# yield "user_ids", user_ids +# if usernames: +# yield "usernames", usernames +# if next_token: +# yield "next_token", next_token +# if followers_data: +# yield "data", followers_data +# if included: +# yield "included", included +# if meta: +# yield "meta", meta + +# except Exception as e: +# yield "error", handle_tweepy_exception(e) + +# class TwitterGetFollowedListsBlock(Block): +# """ +# Gets lists followed by a specified Twitter user +# """ + +# class Input(UserExpansionInputs): +# credentials: TwitterCredentialsInput = TwitterCredentialsField( +# ["follows.read", "users.read", "list.read", "offline.access"] +# ) + +# user_id: str = SchemaField( +# description="The user ID whose followed Lists to retrieve", +# placeholder="Enter user ID", +# required=True +# ) + +# max_results: 
int = SchemaField( +# description="Max number of results per page (1-100)", +# placeholder="Enter max results", +# default=10, +# advanced=True, +# ) + +# pagination_token: str = SchemaField( +# description="Token for pagination", +# placeholder="Enter pagination token", +# default="", +# advanced=True, +# ) + +# class Output(BlockSchema): +# list_ids: list[str] = SchemaField(description="List of list IDs") +# list_names: list[str] = SchemaField(description="List of list names") +# data: list[dict] = SchemaField(description="Complete list data") +# includes: dict = SchemaField(description="Additional data requested via expansions") +# meta: dict = SchemaField(description="Metadata about the response") +# next_token: str = SchemaField(description="Token for next page of results") +# error: str = SchemaField(description="Error message if the request failed") + +# def __init__(self): +# super().__init__( +# id="0e18bbfc-a62f-11ef-94fa-1f1e174b809e", +# description="This block retrieves all Lists a specified user follows.", +# categories={BlockCategory.SOCIAL}, +# input_schema=TwitterGetFollowedListsBlock.Input, +# output_schema=TwitterGetFollowedListsBlock.Output, +# test_input={ +# "user_id": "123456789", +# "max_results": 10, +# "pagination_token": None, +# "credentials": TEST_CREDENTIALS_INPUT, +# "expansions": [], +# "tweet_fields": [], +# "user_fields": [] +# }, +# test_credentials=TEST_CREDENTIALS, +# test_output=[ +# ("list_ids", ["12345"]), +# ("list_names", ["Test List"]), +# ("data", {"followed_lists": [{"id": "12345", "name": "Test List"}]}), +# ("includes", {}), +# ("meta", {}), +# ("next_token", None), +# ("error", "") +# ], +# test_mock={ +# "get_followed_lists": lambda *args, **kwargs: ({ +# "followed_lists": [{"id": "12345", "name": "Test List"}] +# }, {}, {}, ["12345"], ["Test List"], None) +# } +# ) + +# @staticmethod +# def get_followed_lists( +# credentials: TwitterCredentials, +# user_id: str, +# max_results: int, +# pagination_token: str, +# expansions: list[UserExpansions], +# tweet_fields: list[TweetFields], +# user_fields: list[TweetUserFields] +# ): +# try: +# client = tweepy.Client( +# bearer_token=credentials.access_token.get_secret_value(), +# ) + +# params = { +# "id": user_id, +# "max_results": max_results, +# "pagination_token": None if pagination_token == "" else pagination_token, +# "user_auth": False +# } + +# params = (UserExpansionsBuilder(params) +# .add_expansions(expansions) +# .add_tweet_fields(tweet_fields) +# .add_user_fields(user_fields) +# .build()) + +# response = cast( +# Response, +# client.get_followed_lists(**params) +# ) + +# meta = {} +# list_ids = [] +# list_names = [] +# next_token = None + +# if response.meta: +# meta = response.meta +# next_token = meta.get("next_token") + +# included = IncludesSerializer.serialize(response.includes) +# data = ResponseDataSerializer.serialize_list(response.data) + +# if response.data: +# list_ids = [str(item.id) for item in response.data] +# list_names = [item.name for item in response.data] + +# return data, included, meta, list_ids, list_names, next_token + +# raise Exception("No followed lists found") + +# except tweepy.TweepyException: +# raise + +# def run( +# self, +# input_data: Input, +# *, +# credentials: TwitterCredentials, +# **kwargs, +# ) -> BlockOutput: +# try: +# lists_data, included, meta, list_ids, list_names, next_token = self.get_followed_lists( +# credentials, +# input_data.user_id, +# input_data.max_results, +# input_data.pagination_token, +# input_data.expansions, +# 
input_data.tweet_fields, +# input_data.user_fields +# ) + +# if list_ids: +# yield "list_ids", list_ids +# if list_names: +# yield "list_names", list_names +# if next_token: +# yield "next_token", next_token +# if lists_data: +# yield "data", lists_data +# if included: +# yield "includes", included +# if meta: +# yield "meta", meta + +# except Exception as e: +# yield "error", handle_tweepy_exception(e) diff --git a/autogpt_platform/backend/backend/blocks/twitter/lists/list_lookup.py b/autogpt_platform/backend/backend/blocks/twitter/lists/list_lookup.py new file mode 100644 index 0000000000..5d5d3da6a4 --- /dev/null +++ b/autogpt_platform/backend/backend/blocks/twitter/lists/list_lookup.py @@ -0,0 +1,348 @@ +from typing import cast + +import tweepy +from tweepy.client import Response + +from backend.blocks.twitter._auth import ( + TEST_CREDENTIALS, + TEST_CREDENTIALS_INPUT, + TwitterCredentials, + TwitterCredentialsField, + TwitterCredentialsInput, +) +from backend.blocks.twitter._builders import ListExpansionsBuilder +from backend.blocks.twitter._serializer import ( + IncludesSerializer, + ResponseDataSerializer, +) +from backend.blocks.twitter._types import ( + ListExpansionInputs, + ListExpansionsFilter, + ListFieldsFilter, + TweetUserFieldsFilter, +) +from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception +from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema +from backend.data.model import SchemaField + + +class TwitterGetListBlock(Block): + """ + Gets information about a Twitter List specified by ID + """ + + class Input(ListExpansionInputs): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["tweet.read", "users.read", "offline.access"] + ) + + list_id: str = SchemaField( + description="The ID of the List to lookup", + placeholder="Enter list ID", + required=True, + ) + + class Output(BlockSchema): + # Common outputs + id: str = SchemaField(description="ID of the Twitter List") + name: str = SchemaField(description="Name of the Twitter List") + owner_id: str = SchemaField(description="ID of the List owner") + owner_username: str = SchemaField(description="Username of the List owner") + + # Complete outputs + data: dict = SchemaField(description="Complete list data") + included: dict = SchemaField( + description="Additional data requested via expansions" + ) + meta: dict = SchemaField(description="Metadata about the response") + error: str = SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="34ebc80a-a62f-11ef-9c2a-3fcab6c07079", + description="This block retrieves information about a specified Twitter List.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterGetListBlock.Input, + output_schema=TwitterGetListBlock.Output, + test_input={ + "list_id": "84839422", + "credentials": TEST_CREDENTIALS_INPUT, + "expansions": None, + "list_fields": None, + "user_fields": None, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("id", "84839422"), + ("name", "Official Twitter Accounts"), + ("owner_id", "2244994945"), + ("owner_username", "TwitterAPI"), + ("data", {"id": "84839422", "name": "Official Twitter Accounts"}), + ], + test_mock={ + "get_list": lambda *args, **kwargs: ( + {"id": "84839422", "name": "Official Twitter Accounts"}, + {}, + {}, + "2244994945", + "TwitterAPI", + ) + }, + ) + + @staticmethod + def get_list( + credentials: TwitterCredentials, + list_id: str, + expansions: ListExpansionsFilter | None, + user_fields: 
TweetUserFieldsFilter | None, + list_fields: ListFieldsFilter | None, + ): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + params = {"id": list_id, "user_auth": False} + + params = ( + ListExpansionsBuilder(params) + .add_expansions(expansions) + .add_user_fields(user_fields) + .add_list_fields(list_fields) + .build() + ) + + response = cast(Response, client.get_list(**params)) + + meta = {} + owner_id = "" + owner_username = "" + included = {} + + if response.includes: + included = IncludesSerializer.serialize(response.includes) + + if "users" in included: + owner_id = str(included["users"][0]["id"]) + owner_username = included["users"][0]["username"] + + if response.meta: + meta = response.meta + + if response.data: + data_dict = ResponseDataSerializer.serialize_dict(response.data) + return data_dict, included, meta, owner_id, owner_username + + raise Exception("List not found") + + except tweepy.TweepyException: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + list_data, included, meta, owner_id, owner_username = self.get_list( + credentials, + input_data.list_id, + input_data.expansions, + input_data.user_fields, + input_data.list_fields, + ) + + yield "id", str(list_data["id"]) + yield "name", list_data["name"] + if owner_id: + yield "owner_id", owner_id + if owner_username: + yield "owner_username", owner_username + yield "data", {"id": list_data["id"], "name": list_data["name"]} + if included: + yield "included", included + if meta: + yield "meta", meta + + except Exception as e: + yield "error", handle_tweepy_exception(e) + + +class TwitterGetOwnedListsBlock(Block): + """ + Gets all Lists owned by the specified user + """ + + class Input(ListExpansionInputs): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["tweet.read", "users.read", "list.read", "offline.access"] + ) + + user_id: str = SchemaField( + description="The user ID whose owned Lists to retrieve", + placeholder="Enter user ID", + required=True, + ) + + max_results: int | None = SchemaField( + description="Maximum number of results per page (1-100)", + placeholder="Enter max results (default 100)", + advanced=True, + default=10, + ) + + pagination_token: str | None = SchemaField( + description="Token for pagination", + placeholder="Enter pagination token", + advanced=True, + default="", + ) + + class Output(BlockSchema): + # Common outputs + list_ids: list[str] = SchemaField(description="List ids of the owned lists") + list_names: list[str] = SchemaField(description="List names of the owned lists") + next_token: str = SchemaField(description="Token for next page of results") + + # Complete outputs + data: list[dict] = SchemaField(description="Complete owned lists data") + included: dict = SchemaField( + description="Additional data requested via expansions" + ) + meta: dict = SchemaField(description="Metadata about the response") + error: str = SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="2b6bdb26-a62f-11ef-a9ce-ff89c2568726", + description="This block retrieves all Lists owned by a specified Twitter user.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterGetOwnedListsBlock.Input, + output_schema=TwitterGetOwnedListsBlock.Output, + test_input={ + "user_id": "2244994945", + "max_results": 10, + "credentials": TEST_CREDENTIALS_INPUT, + "expansions": None, + "list_fields": None, + "user_fields": 
None, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("list_ids", ["84839422"]), + ("list_names", ["Official Twitter Accounts"]), + ("data", [{"id": "84839422", "name": "Official Twitter Accounts"}]), + ], + test_mock={ + "get_owned_lists": lambda *args, **kwargs: ( + [{"id": "84839422", "name": "Official Twitter Accounts"}], + {}, + {}, + ["84839422"], + ["Official Twitter Accounts"], + None, + ) + }, + ) + + @staticmethod + def get_owned_lists( + credentials: TwitterCredentials, + user_id: str, + max_results: int | None, + pagination_token: str | None, + expansions: ListExpansionsFilter | None, + user_fields: TweetUserFieldsFilter | None, + list_fields: ListFieldsFilter | None, + ): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + params = { + "id": user_id, + "max_results": max_results, + "pagination_token": ( + None if pagination_token == "" else pagination_token + ), + "user_auth": False, + } + + params = ( + ListExpansionsBuilder(params) + .add_expansions(expansions) + .add_user_fields(user_fields) + .add_list_fields(list_fields) + .build() + ) + + response = cast(Response, client.get_owned_lists(**params)) + + meta = {} + included = {} + list_ids = [] + list_names = [] + next_token = None + + if response.meta: + meta = response.meta + next_token = meta.get("next_token") + + if response.includes: + included = IncludesSerializer.serialize(response.includes) + + if response.data: + data = ResponseDataSerializer.serialize_list(response.data) + list_ids = [ + str(item.id) for item in response.data if hasattr(item, "id") + ] + list_names = [ + item.name for item in response.data if hasattr(item, "name") + ] + + return data, included, meta, list_ids, list_names, next_token + + raise Exception("User has no owned lists") + + except tweepy.TweepyException: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + list_data, included, meta, list_ids, list_names, next_token = ( + self.get_owned_lists( + credentials, + input_data.user_id, + input_data.max_results, + input_data.pagination_token, + input_data.expansions, + input_data.user_fields, + input_data.list_fields, + ) + ) + + if list_ids: + yield "list_ids", list_ids + if list_names: + yield "list_names", list_names + if next_token: + yield "next_token", next_token + if list_data: + yield "data", list_data + if included: + yield "included", included + if meta: + yield "meta", meta + + except Exception as e: + yield "error", handle_tweepy_exception(e) diff --git a/autogpt_platform/backend/backend/blocks/twitter/lists/list_members.py b/autogpt_platform/backend/backend/blocks/twitter/lists/list_members.py new file mode 100644 index 0000000000..8e0bfd7405 --- /dev/null +++ b/autogpt_platform/backend/backend/blocks/twitter/lists/list_members.py @@ -0,0 +1,527 @@ +from typing import cast + +import tweepy +from tweepy.client import Response + +from backend.blocks.twitter._auth import ( + TEST_CREDENTIALS, + TEST_CREDENTIALS_INPUT, + TwitterCredentials, + TwitterCredentialsField, + TwitterCredentialsInput, +) +from backend.blocks.twitter._builders import ( + ListExpansionsBuilder, + UserExpansionsBuilder, +) +from backend.blocks.twitter._serializer import ( + IncludesSerializer, + ResponseDataSerializer, +) +from backend.blocks.twitter._types import ( + ListExpansionInputs, + ListExpansionsFilter, + ListFieldsFilter, + TweetFieldsFilter, + TweetUserFieldsFilter, + UserExpansionInputs, + UserExpansionsFilter, +)
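+# Shared helpers: the builders assemble the Tweepy request parameters from the optional expansion/field filter inputs, and the serializers flatten Tweepy response objects into plain dicts for block outputs.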
+from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception +from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema +from backend.data.model import SchemaField + + +class TwitterRemoveListMemberBlock(Block): + """ + Removes a member from a Twitter List that the authenticated user owns + """ + + class Input(BlockSchema): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["list.write", "users.read", "tweet.read", "offline.access"] + ) + + list_id: str = SchemaField( + description="The ID of the List to remove the member from", + placeholder="Enter list ID", + required=True, + ) + + user_id: str = SchemaField( + description="The ID of the user to remove from the List", + placeholder="Enter user ID to remove", + required=True, + ) + + class Output(BlockSchema): + success: bool = SchemaField( + description="Whether the member was successfully removed" + ) + error: str = SchemaField(description="Error message if the removal failed") + + def __init__(self): + super().__init__( + id="5a3d1320-a62f-11ef-b7ce-a79e7656bcb0", + description="This block removes a specified user from a Twitter List owned by the authenticated user.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterRemoveListMemberBlock.Input, + output_schema=TwitterRemoveListMemberBlock.Output, + test_input={ + "list_id": "123456789", + "user_id": "987654321", + "credentials": TEST_CREDENTIALS_INPUT, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[("success", True)], + test_mock={"remove_list_member": lambda *args, **kwargs: True}, + ) + + @staticmethod + def remove_list_member(credentials: TwitterCredentials, list_id: str, user_id: str): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + client.remove_list_member(id=list_id, user_id=user_id, user_auth=False) + return True + except tweepy.TweepyException: + raise + except Exception: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + success = self.remove_list_member( + credentials, input_data.list_id, input_data.user_id + ) + yield "success", success + + except Exception as e: + yield "error", handle_tweepy_exception(e) + + +class TwitterAddListMemberBlock(Block): + """ + Adds a member to a Twitter List that the authenticated user owns + """ + + class Input(BlockSchema): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["list.write", "users.read", "tweet.read", "offline.access"] + ) + + list_id: str = SchemaField( + description="The ID of the List to add the member to", + placeholder="Enter list ID", + required=True, + ) + + user_id: str = SchemaField( + description="The ID of the user to add to the List", + placeholder="Enter user ID to add", + required=True, + ) + + class Output(BlockSchema): + success: bool = SchemaField( + description="Whether the member was successfully added" + ) + error: str = SchemaField(description="Error message if the addition failed") + + def __init__(self): + super().__init__( + id="3ee8284e-a62f-11ef-84e4-8f6e2cbf0ddb", + description="This block adds a specified user to a Twitter List owned by the authenticated user.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterAddListMemberBlock.Input, + output_schema=TwitterAddListMemberBlock.Output, + test_input={ + "list_id": "123456789", + "user_id": "987654321", + "credentials": TEST_CREDENTIALS_INPUT, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[("success", True)], + 
test_mock={"add_list_member": lambda *args, **kwargs: True}, + ) + + @staticmethod + def add_list_member(credentials: TwitterCredentials, list_id: str, user_id: str): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + client.add_list_member(id=list_id, user_id=user_id, user_auth=False) + return True + except tweepy.TweepyException: + raise + except Exception: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + success = self.add_list_member( + credentials, input_data.list_id, input_data.user_id + ) + yield "success", success + + except Exception as e: + yield "error", handle_tweepy_exception(e) + + +class TwitterGetListMembersBlock(Block): + """ + Gets the members of a specified Twitter List + """ + + class Input(UserExpansionInputs): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["list.read", "offline.access"] + ) + + list_id: str = SchemaField( + description="The ID of the List to get members from", + placeholder="Enter list ID", + required=True, + ) + + max_results: int | None = SchemaField( + description="Maximum number of results per page (1-100)", + placeholder="Enter max results", + default=10, + advanced=True, + ) + + pagination_token: str | None = SchemaField( + description="Token for pagination of results", + placeholder="Enter pagination token", + default="", + advanced=True, + ) + + class Output(BlockSchema): + ids: list[str] = SchemaField(description="List of member user IDs") + usernames: list[str] = SchemaField(description="List of member usernames") + next_token: str = SchemaField(description="Next token for pagination") + + data: list[dict] = SchemaField( + description="Complete user data for list members" + ) + included: dict = SchemaField( + description="Additional data requested via expansions" + ) + meta: dict = SchemaField(description="Metadata including pagination info") + + error: str = SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="4dba046e-a62f-11ef-b69a-87240c84b4c7", + description="This block retrieves the members of a specified Twitter List.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterGetListMembersBlock.Input, + output_schema=TwitterGetListMembersBlock.Output, + test_input={ + "list_id": "123456789", + "max_results": 2, + "pagination_token": None, + "credentials": TEST_CREDENTIALS_INPUT, + "expansions": None, + "tweet_fields": None, + "user_fields": None, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("ids", ["12345", "67890"]), + ("usernames", ["testuser1", "testuser2"]), + ( + "data", + [ + {"id": "12345", "username": "testuser1"}, + {"id": "67890", "username": "testuser2"}, + ], + ), + ], + test_mock={ + "get_list_members": lambda *args, **kwargs: ( + ["12345", "67890"], + ["testuser1", "testuser2"], + [ + {"id": "12345", "username": "testuser1"}, + {"id": "67890", "username": "testuser2"}, + ], + {}, + {}, + None, + ) + }, + ) + + @staticmethod + def get_list_members( + credentials: TwitterCredentials, + list_id: str, + max_results: int | None, + pagination_token: str | None, + expansions: UserExpansionsFilter | None, + tweet_fields: TweetFieldsFilter | None, + user_fields: TweetUserFieldsFilter | None, + ): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + params = { + "id": list_id, + "max_results": max_results, + "pagination_token": ( + None if pagination_token == 
"" else pagination_token + ), + "user_auth": False, + } + + params = ( + UserExpansionsBuilder(params) + .add_expansions(expansions) + .add_tweet_fields(tweet_fields) + .add_user_fields(user_fields) + .build() + ) + + response = cast(Response, client.get_list_members(**params)) + + meta = {} + included = {} + next_token = None + user_ids = [] + usernames = [] + + if response.meta: + meta = response.meta + next_token = meta.get("next_token") + + if response.includes: + included = IncludesSerializer.serialize(response.includes) + + if response.data: + data = ResponseDataSerializer.serialize_list(response.data) + user_ids = [str(user.id) for user in response.data] + usernames = [user.username for user in response.data] + return user_ids, usernames, data, included, meta, next_token + + raise Exception("List members not found") + + except tweepy.TweepyException: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + ids, usernames, data, included, meta, next_token = self.get_list_members( + credentials, + input_data.list_id, + input_data.max_results, + input_data.pagination_token, + input_data.expansions, + input_data.tweet_fields, + input_data.user_fields, + ) + + if ids: + yield "ids", ids + if usernames: + yield "usernames", usernames + if next_token: + yield "next_token", next_token + if data: + yield "data", data + if included: + yield "included", included + if meta: + yield "meta", meta + + except Exception as e: + yield "error", handle_tweepy_exception(e) + + +class TwitterGetListMembershipsBlock(Block): + """ + Gets all Lists that a specified user is a member of + """ + + class Input(ListExpansionInputs): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["list.read", "offline.access"] + ) + + user_id: str = SchemaField( + description="The ID of the user whose List memberships to retrieve", + placeholder="Enter user ID", + required=True, + ) + + max_results: int | None = SchemaField( + description="Maximum number of results per page (1-100)", + placeholder="Enter max results", + advanced=True, + default=10, + ) + + pagination_token: str | None = SchemaField( + description="Token for pagination of results", + placeholder="Enter pagination token", + advanced=True, + default="", + ) + + class Output(BlockSchema): + list_ids: list[str] = SchemaField(description="List of list IDs") + next_token: str = SchemaField(description="Next token for pagination") + + data: list[dict] = SchemaField(description="List membership data") + included: dict = SchemaField( + description="Additional data requested via expansions" + ) + meta: dict = SchemaField(description="Metadata about pagination") + error: str = SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="46e6429c-a62f-11ef-81c0-2b55bc7823ba", + description="This block retrieves all Lists that a specified user is a member of.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterGetListMembershipsBlock.Input, + output_schema=TwitterGetListMembershipsBlock.Output, + test_input={ + "user_id": "123456789", + "max_results": 1, + "pagination_token": None, + "credentials": TEST_CREDENTIALS_INPUT, + "expansions": None, + "list_fields": None, + "user_fields": None, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("list_ids", ["84839422"]), + ("data", [{"id": "84839422"}]), + ], + test_mock={ + "get_list_memberships": lambda *args, **kwargs: ( + [{"id": "84839422"}], + {}, + {}, + 
["84839422"], + None, + ) + }, + ) + + @staticmethod + def get_list_memberships( + credentials: TwitterCredentials, + user_id: str, + max_results: int | None, + pagination_token: str | None, + expansions: ListExpansionsFilter | None, + user_fields: TweetUserFieldsFilter | None, + list_fields: ListFieldsFilter | None, + ): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + params = { + "id": user_id, + "max_results": max_results, + "pagination_token": ( + None if pagination_token == "" else pagination_token + ), + "user_auth": False, + } + + params = ( + ListExpansionsBuilder(params) + .add_expansions(expansions) + .add_user_fields(user_fields) + .add_list_fields(list_fields) + .build() + ) + + response = cast(Response, client.get_list_memberships(**params)) + + meta = {} + included = {} + next_token = None + list_ids = [] + + if response.meta: + meta = response.meta + next_token = meta.get("next_token") + + if response.includes: + included = IncludesSerializer.serialize(response.includes) + + if response.data: + data = ResponseDataSerializer.serialize_list(response.data) + list_ids = [str(lst.id) for lst in response.data] + return data, included, meta, list_ids, next_token + + raise Exception("List memberships not found") + + except tweepy.TweepyException: + raise + except Exception: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + data, included, meta, list_ids, next_token = self.get_list_memberships( + credentials, + input_data.user_id, + input_data.max_results, + input_data.pagination_token, + input_data.expansions, + input_data.user_fields, + input_data.list_fields, + ) + + if list_ids: + yield "list_ids", list_ids + if next_token: + yield "next_token", next_token + if data: + yield "data", data + if included: + yield "included", included + if meta: + yield "meta", meta + + except Exception as e: + yield "error", handle_tweepy_exception(e) diff --git a/autogpt_platform/backend/backend/blocks/twitter/lists/list_tweets_lookup.py b/autogpt_platform/backend/backend/blocks/twitter/lists/list_tweets_lookup.py new file mode 100644 index 0000000000..99de955cb3 --- /dev/null +++ b/autogpt_platform/backend/backend/blocks/twitter/lists/list_tweets_lookup.py @@ -0,0 +1,217 @@ +from typing import cast + +import tweepy +from tweepy.client import Response + +from backend.blocks.twitter._auth import ( + TEST_CREDENTIALS, + TEST_CREDENTIALS_INPUT, + TwitterCredentials, + TwitterCredentialsField, + TwitterCredentialsInput, +) +from backend.blocks.twitter._builders import TweetExpansionsBuilder +from backend.blocks.twitter._serializer import ( + IncludesSerializer, + ResponseDataSerializer, +) +from backend.blocks.twitter._types import ( + ExpansionFilter, + TweetExpansionInputs, + TweetFieldsFilter, + TweetMediaFieldsFilter, + TweetPlaceFieldsFilter, + TweetPollFieldsFilter, + TweetUserFieldsFilter, +) +from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception +from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema +from backend.data.model import SchemaField + + +class TwitterGetListTweetsBlock(Block): + """ + Gets tweets from a specified Twitter list + """ + + class Input(TweetExpansionInputs): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["tweet.read", "offline.access"] + ) + + list_id: str = SchemaField( + description="The ID of the List whose Tweets you would like to retrieve", + placeholder="Enter list 
ID", + required=True, + ) + + max_results: int | None = SchemaField( + description="Maximum number of results per page (1-100)", + placeholder="Enter max results", + default=10, + advanced=True, + ) + + pagination_token: str | None = SchemaField( + description="Token for paginating through results", + placeholder="Enter pagination token", + default="", + advanced=True, + ) + + class Output(BlockSchema): + # Common outputs + tweet_ids: list[str] = SchemaField(description="List of tweet IDs") + texts: list[str] = SchemaField(description="List of tweet texts") + next_token: str = SchemaField(description="Token for next page of results") + + # Complete outputs + data: list[dict] = SchemaField(description="Complete list tweets data") + included: dict = SchemaField( + description="Additional data requested via expansions" + ) + meta: dict = SchemaField( + description="Response metadata including pagination tokens" + ) + error: str = SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="6657edb0-a62f-11ef-8c10-0326d832467d", + description="This block retrieves tweets from a specified Twitter list.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterGetListTweetsBlock.Input, + output_schema=TwitterGetListTweetsBlock.Output, + test_input={ + "list_id": "84839422", + "max_results": 1, + "pagination_token": None, + "credentials": TEST_CREDENTIALS_INPUT, + "expansions": None, + "media_fields": None, + "place_fields": None, + "poll_fields": None, + "tweet_fields": None, + "user_fields": None, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("tweet_ids", ["1234567890"]), + ("texts", ["Test tweet"]), + ("data", [{"id": "1234567890", "text": "Test tweet"}]), + ], + test_mock={ + "get_list_tweets": lambda *args, **kwargs: ( + [{"id": "1234567890", "text": "Test tweet"}], + {}, + {}, + ["1234567890"], + ["Test tweet"], + None, + ) + }, + ) + + @staticmethod + def get_list_tweets( + credentials: TwitterCredentials, + list_id: str, + max_results: int | None, + pagination_token: str | None, + expansions: ExpansionFilter | None, + media_fields: TweetMediaFieldsFilter | None, + place_fields: TweetPlaceFieldsFilter | None, + poll_fields: TweetPollFieldsFilter | None, + tweet_fields: TweetFieldsFilter | None, + user_fields: TweetUserFieldsFilter | None, + ): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + params = { + "id": list_id, + "max_results": max_results, + "pagination_token": ( + None if pagination_token == "" else pagination_token + ), + "user_auth": False, + } + + params = ( + TweetExpansionsBuilder(params) + .add_expansions(expansions) + .add_media_fields(media_fields) + .add_place_fields(place_fields) + .add_poll_fields(poll_fields) + .add_tweet_fields(tweet_fields) + .add_user_fields(user_fields) + .build() + ) + + response = cast(Response, client.get_list_tweets(**params)) + + meta = {} + included = {} + tweet_ids = [] + texts = [] + next_token = None + + if response.meta: + meta = response.meta + next_token = meta.get("next_token") + + if response.includes: + included = IncludesSerializer.serialize(response.includes) + + if response.data: + data = ResponseDataSerializer.serialize_list(response.data) + tweet_ids = [str(item.id) for item in response.data] + texts = [item.text for item in response.data] + + return data, included, meta, tweet_ids, texts, next_token + + raise Exception("No tweets found in this list") + + except tweepy.TweepyException: + raise + + def run( + 
self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + list_data, included, meta, tweet_ids, texts, next_token = ( + self.get_list_tweets( + credentials, + input_data.list_id, + input_data.max_results, + input_data.pagination_token, + input_data.expansions, + input_data.media_fields, + input_data.place_fields, + input_data.poll_fields, + input_data.tweet_fields, + input_data.user_fields, + ) + ) + + if tweet_ids: + yield "tweet_ids", tweet_ids + if texts: + yield "texts", texts + if next_token: + yield "next_token", next_token + if list_data: + yield "data", list_data + if included: + yield "included", included + if meta: + yield "meta", meta + + except Exception as e: + yield "error", handle_tweepy_exception(e) diff --git a/autogpt_platform/backend/backend/blocks/twitter/lists/manage_lists.py b/autogpt_platform/backend/backend/blocks/twitter/lists/manage_lists.py new file mode 100644 index 0000000000..490a841e4a --- /dev/null +++ b/autogpt_platform/backend/backend/blocks/twitter/lists/manage_lists.py @@ -0,0 +1,278 @@ +from typing import cast + +import tweepy +from tweepy.client import Response + +from backend.blocks.twitter._auth import ( + TEST_CREDENTIALS, + TEST_CREDENTIALS_INPUT, + TwitterCredentials, + TwitterCredentialsField, + TwitterCredentialsInput, +) +from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception +from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema +from backend.data.model import SchemaField + + +class TwitterDeleteListBlock(Block): + """ + Deletes a Twitter List owned by the authenticated user + """ + + class Input(BlockSchema): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["list.write", "offline.access"] + ) + + list_id: str = SchemaField( + description="The ID of the List to be deleted", + placeholder="Enter list ID", + required=True, + ) + + class Output(BlockSchema): + success: bool = SchemaField(description="Whether the deletion was successful") + error: str = SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="843c6892-a62f-11ef-a5c8-b71239a78d3b", + description="This block deletes a specified Twitter List owned by the authenticated user.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterDeleteListBlock.Input, + output_schema=TwitterDeleteListBlock.Output, + test_input={"list_id": "1234567890", "credentials": TEST_CREDENTIALS_INPUT}, + test_credentials=TEST_CREDENTIALS, + test_output=[("success", True)], + test_mock={"delete_list": lambda *args, **kwargs: True}, + ) + + @staticmethod + def delete_list(credentials: TwitterCredentials, list_id: str): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + client.delete_list(id=list_id, user_auth=False) + return True + + except tweepy.TweepyException: + raise + except Exception: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + success = self.delete_list(credentials, input_data.list_id) + yield "success", success + + except Exception as e: + yield "error", handle_tweepy_exception(e) + + +class TwitterUpdateListBlock(Block): + """ + Updates a Twitter List owned by the authenticated user + """ + + class Input(BlockSchema): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["list.write", "offline.access"] + ) + + list_id: str = SchemaField( + description="The ID of the List to be 
updated", + placeholder="Enter list ID", + advanced=False, + ) + + name: str | None = SchemaField( + description="New name for the List", + placeholder="Enter list name", + default="", + advanced=False, + ) + + description: str | None = SchemaField( + description="New description for the List", + placeholder="Enter list description", + default="", + advanced=False, + ) + + class Output(BlockSchema): + success: bool = SchemaField(description="Whether the update was successful") + error: str = SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="7d12630a-a62f-11ef-90c9-8f5a996612c3", + description="This block updates a specified Twitter List owned by the authenticated user.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterUpdateListBlock.Input, + output_schema=TwitterUpdateListBlock.Output, + test_input={ + "list_id": "1234567890", + "name": "Updated List Name", + "description": "Updated List Description", + "private": True, + "credentials": TEST_CREDENTIALS_INPUT, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[("success", True)], + test_mock={"update_list": lambda *args, **kwargs: True}, + ) + + @staticmethod + def update_list( + credentials: TwitterCredentials, + list_id: str, + name: str | None, + description: str | None, + ): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + client.update_list( + id=list_id, + name=None if name == "" else name, + description=None if description == "" else description, + user_auth=False, + ) + return True + + except tweepy.TweepyException: + raise + except Exception: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + success = self.update_list( + credentials, input_data.list_id, input_data.name, input_data.description + ) + yield "success", success + + except Exception as e: + yield "error", handle_tweepy_exception(e) + + +class TwitterCreateListBlock(Block): + """ + Creates a Twitter List owned by the authenticated user + """ + + class Input(BlockSchema): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["list.write", "offline.access"] + ) + + name: str = SchemaField( + description="The name of the List to be created", + placeholder="Enter list name", + advanced=False, + default="", + ) + + description: str | None = SchemaField( + description="Description of the List", + placeholder="Enter list description", + advanced=False, + default="", + ) + + private: bool = SchemaField( + description="Whether the List should be private", + advanced=False, + default=False, + ) + + class Output(BlockSchema): + url: str = SchemaField(description="URL of the created list") + list_id: str = SchemaField(description="ID of the created list") + error: str = SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="724148ba-a62f-11ef-89ba-5349b813ef5f", + description="This block creates a new Twitter List for the authenticated user.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterCreateListBlock.Input, + output_schema=TwitterCreateListBlock.Output, + test_input={ + "name": "New List Name", + "description": "New List Description", + "private": True, + "credentials": TEST_CREDENTIALS_INPUT, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("list_id", "1234567890"), + ("url", "https://twitter.com/i/lists/1234567890"), + ], + test_mock={"create_list": lambda *args, 
**kwargs: ("1234567890")}, + ) + + @staticmethod + def create_list( + credentials: TwitterCredentials, + name: str, + description: str | None, + private: bool, + ): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + response = cast( + Response, + client.create_list( + name=None if name == "" else name, + description=None if description == "" else description, + private=private, + user_auth=False, + ), + ) + + list_id = str(response.data["id"]) + + return list_id + + except tweepy.TweepyException: + raise + except Exception: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + list_id = self.create_list( + credentials, input_data.name, input_data.description, input_data.private + ) + yield "list_id", list_id + yield "url", f"https://twitter.com/i/lists/{list_id}" + + except Exception as e: + yield "error", handle_tweepy_exception(e) diff --git a/autogpt_platform/backend/backend/blocks/twitter/lists/pinned_lists.py b/autogpt_platform/backend/backend/blocks/twitter/lists/pinned_lists.py new file mode 100644 index 0000000000..f9b54cfdac --- /dev/null +++ b/autogpt_platform/backend/backend/blocks/twitter/lists/pinned_lists.py @@ -0,0 +1,285 @@ +from typing import cast + +import tweepy +from tweepy.client import Response + +from backend.blocks.twitter._auth import ( + TEST_CREDENTIALS, + TEST_CREDENTIALS_INPUT, + TwitterCredentials, + TwitterCredentialsField, + TwitterCredentialsInput, +) +from backend.blocks.twitter._builders import ListExpansionsBuilder +from backend.blocks.twitter._serializer import ( + IncludesSerializer, + ResponseDataSerializer, +) +from backend.blocks.twitter._types import ( + ListExpansionInputs, + ListExpansionsFilter, + ListFieldsFilter, + TweetUserFieldsFilter, +) +from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception +from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema +from backend.data.model import SchemaField + + +class TwitterUnpinListBlock(Block): + """ + Enables the authenticated user to unpin a List. 
+ """ + + class Input(BlockSchema): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["list.write", "users.read", "tweet.read", "offline.access"] + ) + + list_id: str = SchemaField( + description="The ID of the List to unpin", + placeholder="Enter list ID", + required=True, + ) + + class Output(BlockSchema): + success: bool = SchemaField(description="Whether the unpin was successful") + error: str = SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="a099c034-a62f-11ef-9622-47d0ceb73555", + description="This block allows the authenticated user to unpin a specified List.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterUnpinListBlock.Input, + output_schema=TwitterUnpinListBlock.Output, + test_input={"list_id": "123456789", "credentials": TEST_CREDENTIALS_INPUT}, + test_credentials=TEST_CREDENTIALS, + test_output=[("success", True)], + test_mock={"unpin_list": lambda *args, **kwargs: True}, + ) + + @staticmethod + def unpin_list(credentials: TwitterCredentials, list_id: str): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + client.unpin_list(list_id=list_id, user_auth=False) + + return True + + except tweepy.TweepyException: + raise + except Exception: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + success = self.unpin_list(credentials, input_data.list_id) + yield "success", success + + except Exception as e: + yield "error", handle_tweepy_exception(e) + + +class TwitterPinListBlock(Block): + """ + Enables the authenticated user to pin a List. + """ + + class Input(BlockSchema): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["list.write", "users.read", "tweet.read", "offline.access"] + ) + + list_id: str = SchemaField( + description="The ID of the List to pin", + placeholder="Enter list ID", + required=True, + ) + + class Output(BlockSchema): + success: bool = SchemaField(description="Whether the pin was successful") + error: str = SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="8ec16e48-a62f-11ef-9f35-f3d6de43a802", + description="This block allows the authenticated user to pin a specified List.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterPinListBlock.Input, + output_schema=TwitterPinListBlock.Output, + test_input={"list_id": "123456789", "credentials": TEST_CREDENTIALS_INPUT}, + test_credentials=TEST_CREDENTIALS, + test_output=[("success", True)], + test_mock={"pin_list": lambda *args, **kwargs: True}, + ) + + @staticmethod + def pin_list(credentials: TwitterCredentials, list_id: str): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + client.pin_list(list_id=list_id, user_auth=False) + + return True + + except tweepy.TweepyException: + raise + except Exception: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + success = self.pin_list(credentials, input_data.list_id) + yield "success", success + + except Exception as e: + yield "error", handle_tweepy_exception(e) + + +class TwitterGetPinnedListsBlock(Block): + """ + Returns the Lists pinned by the authenticated user. 
+ """ + + class Input(ListExpansionInputs): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["lists.read", "users.read", "offline.access"] + ) + + class Output(BlockSchema): + list_ids: list[str] = SchemaField(description="List IDs of the pinned lists") + list_names: list[str] = SchemaField( + description="List names of the pinned lists" + ) + + data: list[dict] = SchemaField( + description="Response data containing pinned lists" + ) + included: dict = SchemaField( + description="Additional data requested via expansions" + ) + meta: dict = SchemaField(description="Metadata about the response") + error: str = SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="97e03aae-a62f-11ef-bc53-5b89cb02888f", + description="This block returns the Lists pinned by the authenticated user.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterGetPinnedListsBlock.Input, + output_schema=TwitterGetPinnedListsBlock.Output, + test_input={ + "expansions": None, + "list_fields": None, + "user_fields": None, + "credentials": TEST_CREDENTIALS_INPUT, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("list_ids", ["84839422"]), + ("list_names", ["Twitter List"]), + ("data", [{"id": "84839422", "name": "Twitter List"}]), + ], + test_mock={ + "get_pinned_lists": lambda *args, **kwargs: ( + [{"id": "84839422", "name": "Twitter List"}], + {}, + {}, + ["84839422"], + ["Twitter List"], + ) + }, + ) + + @staticmethod + def get_pinned_lists( + credentials: TwitterCredentials, + expansions: ListExpansionsFilter | None, + user_fields: TweetUserFieldsFilter | None, + list_fields: ListFieldsFilter | None, + ): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + params = {"user_auth": False} + + params = ( + ListExpansionsBuilder(params) + .add_expansions(expansions) + .add_user_fields(user_fields) + .add_list_fields(list_fields) + .build() + ) + + response = cast(Response, client.get_pinned_lists(**params)) + + meta = {} + included = {} + list_ids = [] + list_names = [] + + if response.meta: + meta = response.meta + + if response.includes: + included = IncludesSerializer.serialize(response.includes) + + if response.data: + data = ResponseDataSerializer.serialize_list(response.data) + list_ids = [str(item.id) for item in response.data] + list_names = [item.name for item in response.data] + return data, included, meta, list_ids, list_names + + raise Exception("Lists not found") + + except tweepy.TweepyException: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + list_data, included, meta, list_ids, list_names = self.get_pinned_lists( + credentials, + input_data.expansions, + input_data.user_fields, + input_data.list_fields, + ) + + if list_ids: + yield "list_ids", list_ids + if list_names: + yield "list_names", list_names + if list_data: + yield "data", list_data + if included: + yield "included", included + if meta: + yield "meta", meta + + except Exception as e: + yield "error", handle_tweepy_exception(e) diff --git a/autogpt_platform/backend/backend/blocks/twitter/spaces/search_spaces.py b/autogpt_platform/backend/backend/blocks/twitter/spaces/search_spaces.py new file mode 100644 index 0000000000..ad3399c3b1 --- /dev/null +++ b/autogpt_platform/backend/backend/blocks/twitter/spaces/search_spaces.py @@ -0,0 +1,195 @@ +from typing import cast + +import tweepy +from tweepy.client import Response + +from 
backend.blocks.twitter._auth import ( + TEST_CREDENTIALS, + TEST_CREDENTIALS_INPUT, + TwitterCredentials, + TwitterCredentialsField, + TwitterCredentialsInput, +) +from backend.blocks.twitter._builders import SpaceExpansionsBuilder +from backend.blocks.twitter._serializer import ( + IncludesSerializer, + ResponseDataSerializer, +) +from backend.blocks.twitter._types import ( + SpaceExpansionInputs, + SpaceExpansionsFilter, + SpaceFieldsFilter, + SpaceStatesFilter, + TweetUserFieldsFilter, +) +from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception +from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema +from backend.data.model import SchemaField + + +class TwitterSearchSpacesBlock(Block): + """ + Returns live or scheduled Spaces matching specified search terms [for a week only] + """ + + class Input(SpaceExpansionInputs): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["spaces.read", "users.read", "tweet.read", "offline.access"] + ) + + query: str = SchemaField( + description="Search term to find in Space titles", + placeholder="Enter search query", + ) + + max_results: int | None = SchemaField( + description="Maximum number of results to return (1-100)", + placeholder="Enter max results", + default=10, + advanced=True, + ) + + state: SpaceStatesFilter = SchemaField( + description="Type of Spaces to return (live, scheduled, or all)", + placeholder="Enter state filter", + default=SpaceStatesFilter.all, + ) + + class Output(BlockSchema): + # Common outputs that user commonly uses + ids: list[str] = SchemaField(description="List of space IDs") + titles: list[str] = SchemaField(description="List of space titles") + host_ids: list = SchemaField(description="List of host IDs") + next_token: str = SchemaField(description="Next token for pagination") + + # Complete outputs for advanced use + data: list[dict] = SchemaField(description="Complete space data") + includes: dict = SchemaField( + description="Additional data requested via expansions" + ) + meta: dict = SchemaField(description="Metadata including pagination info") + + error: str = SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="aaefdd48-a62f-11ef-a73c-3f44df63e276", + description="This block searches for Twitter Spaces based on specified terms.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterSearchSpacesBlock.Input, + output_schema=TwitterSearchSpacesBlock.Output, + test_input={ + "query": "tech", + "max_results": 1, + "state": "live", + "credentials": TEST_CREDENTIALS_INPUT, + "expansions": None, + "space_fields": None, + "user_fields": None, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("ids", ["1234"]), + ("titles", ["Tech Talk"]), + ("host_ids", ["5678"]), + ("data", [{"id": "1234", "title": "Tech Talk", "host_ids": ["5678"]}]), + ], + test_mock={ + "search_spaces": lambda *args, **kwargs: ( + [{"id": "1234", "title": "Tech Talk", "host_ids": ["5678"]}], + {}, + {}, + ["1234"], + ["Tech Talk"], + ["5678"], + None, + ) + }, + ) + + @staticmethod + def search_spaces( + credentials: TwitterCredentials, + query: str, + max_results: int | None, + state: SpaceStatesFilter, + expansions: SpaceExpansionsFilter | None, + space_fields: SpaceFieldsFilter | None, + user_fields: TweetUserFieldsFilter | None, + ): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + params = {"query": query, "max_results": max_results, "state": state.value} + + 
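+            # Fold the optional expansion and field filters into the request params via the shared builder before issuing the search.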
params = ( + SpaceExpansionsBuilder(params) + .add_expansions(expansions) + .add_space_fields(space_fields) + .add_user_fields(user_fields) + .build() + ) + + response = cast(Response, client.search_spaces(**params)) + + meta = {} + next_token = "" + if response.meta: + meta = response.meta + if "next_token" in meta: + next_token = meta["next_token"] + + included = IncludesSerializer.serialize(response.includes) + data = ResponseDataSerializer.serialize_list(response.data) + + if response.data: + ids = [str(space["id"]) for space in response.data if "id" in space] + titles = [space["title"] for space in data if "title" in space] + host_ids = [space["host_ids"] for space in data if "host_ids" in space] + + return data, included, meta, ids, titles, host_ids, next_token + + raise Exception("Spaces not found") + + except tweepy.TweepyException: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + data, included, meta, ids, titles, host_ids, next_token = ( + self.search_spaces( + credentials, + input_data.query, + input_data.max_results, + input_data.state, + input_data.expansions, + input_data.space_fields, + input_data.user_fields, + ) + ) + + if ids: + yield "ids", ids + if titles: + yield "titles", titles + if host_ids: + yield "host_ids", host_ids + if next_token: + yield "next_token", next_token + if data: + yield "data", data + if included: + yield "includes", included + if meta: + yield "meta", meta + + except Exception as e: + yield "error", handle_tweepy_exception(e) diff --git a/autogpt_platform/backend/backend/blocks/twitter/spaces/spaces_lookup.py b/autogpt_platform/backend/backend/blocks/twitter/spaces/spaces_lookup.py new file mode 100644 index 0000000000..d7365e80d8 --- /dev/null +++ b/autogpt_platform/backend/backend/blocks/twitter/spaces/spaces_lookup.py @@ -0,0 +1,651 @@ +from typing import Literal, Union, cast + +import tweepy +from pydantic import BaseModel +from tweepy.client import Response + +from backend.blocks.twitter._auth import ( + TEST_CREDENTIALS, + TEST_CREDENTIALS_INPUT, + TwitterCredentials, + TwitterCredentialsField, + TwitterCredentialsInput, +) +from backend.blocks.twitter._builders import ( + SpaceExpansionsBuilder, + TweetExpansionsBuilder, + UserExpansionsBuilder, +) +from backend.blocks.twitter._serializer import ( + IncludesSerializer, + ResponseDataSerializer, +) +from backend.blocks.twitter._types import ( + ExpansionFilter, + SpaceExpansionInputs, + SpaceExpansionsFilter, + SpaceFieldsFilter, + TweetExpansionInputs, + TweetFieldsFilter, + TweetMediaFieldsFilter, + TweetPlaceFieldsFilter, + TweetPollFieldsFilter, + TweetUserFieldsFilter, + UserExpansionInputs, + UserExpansionsFilter, +) +from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception +from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema +from backend.data.model import SchemaField + + +class SpaceList(BaseModel): + discriminator: Literal["space_list"] + space_ids: list[str] = SchemaField( + description="List of Space IDs to lookup (up to 100)", + placeholder="Enter Space IDs", + default=[], + advanced=False, + ) + + +class UserList(BaseModel): + discriminator: Literal["user_list"] + user_ids: list[str] = SchemaField( + description="List of user IDs to lookup their Spaces (up to 100)", + placeholder="Enter user IDs", + default=[], + advanced=False, + ) + + +class TwitterGetSpacesBlock(Block): + """ + Gets information about multiple Twitter Spaces specified by Space IDs or 
creator user IDs + """ + + class Input(SpaceExpansionInputs): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["spaces.read", "users.read", "offline.access"] + ) + + identifier: Union[SpaceList, UserList] = SchemaField( + discriminator="discriminator", + description="Choose whether to lookup spaces by their IDs or by creator user IDs", + advanced=False, + ) + + class Output(BlockSchema): + # Common outputs + ids: list[str] = SchemaField(description="List of space IDs") + titles: list[str] = SchemaField(description="List of space titles") + + # Complete outputs for advanced use + data: list[dict] = SchemaField(description="Complete space data") + includes: dict = SchemaField( + description="Additional data requested via expansions" + ) + error: str = SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="d75bd7d8-a62f-11ef-b0d8-c7a9496f617f", + description="This block retrieves information about multiple Twitter Spaces.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterGetSpacesBlock.Input, + output_schema=TwitterGetSpacesBlock.Output, + test_input={ + "identifier": { + "discriminator": "space_list", + "space_ids": ["1DXxyRYNejbKM"], + }, + "credentials": TEST_CREDENTIALS_INPUT, + "expansions": None, + "space_fields": None, + "user_fields": None, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("ids", ["1DXxyRYNejbKM"]), + ("titles", ["Test Space"]), + ( + "data", + [ + { + "id": "1DXxyRYNejbKM", + "title": "Test Space", + "host_id": "1234567", + } + ], + ), + ], + test_mock={ + "get_spaces": lambda *args, **kwargs: ( + [ + { + "id": "1DXxyRYNejbKM", + "title": "Test Space", + "host_id": "1234567", + } + ], + {}, + ["1DXxyRYNejbKM"], + ["Test Space"], + ) + }, + ) + + @staticmethod + def get_spaces( + credentials: TwitterCredentials, + identifier: Union[SpaceList, UserList], + expansions: SpaceExpansionsFilter | None, + space_fields: SpaceFieldsFilter | None, + user_fields: TweetUserFieldsFilter | None, + ): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + params = { + "ids": ( + identifier.space_ids if isinstance(identifier, SpaceList) else None + ), + "user_ids": ( + identifier.user_ids if isinstance(identifier, UserList) else None + ), + } + + params = ( + SpaceExpansionsBuilder(params) + .add_expansions(expansions) + .add_space_fields(space_fields) + .add_user_fields(user_fields) + .build() + ) + + response = cast(Response, client.get_spaces(**params)) + + ids = [] + titles = [] + + included = IncludesSerializer.serialize(response.includes) + + if response.data: + data = ResponseDataSerializer.serialize_list(response.data) + ids = [space["id"] for space in data if "id" in space] + titles = [space["title"] for space in data if "title" in space] + + return data, included, ids, titles + + raise Exception("No spaces found") + + except tweepy.TweepyException: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + data, included, ids, titles = self.get_spaces( + credentials, + input_data.identifier, + input_data.expansions, + input_data.space_fields, + input_data.user_fields, + ) + + if ids: + yield "ids", ids + if titles: + yield "titles", titles + + if data: + yield "data", data + if included: + yield "includes", included + + except Exception as e: + yield "error", handle_tweepy_exception(e) + + +class TwitterGetSpaceByIdBlock(Block): + """ + Gets information about a 
single Twitter Space specified by Space ID + """ + + class Input(SpaceExpansionInputs): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["spaces.read", "users.read", "offline.access"] + ) + + space_id: str = SchemaField( + description="Space ID to lookup", + placeholder="Enter Space ID", + required=True, + ) + + class Output(BlockSchema): + # Common outputs + id: str = SchemaField(description="Space ID") + title: str = SchemaField(description="Space title") + host_ids: list[str] = SchemaField(description="Host ID") + + # Complete outputs for advanced use + data: dict = SchemaField(description="Complete space data") + includes: dict = SchemaField( + description="Additional data requested via expansions" + ) + error: str = SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="c79700de-a62f-11ef-ab20-fb32bf9d5a9d", + description="This block retrieves information about a single Twitter Space.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterGetSpaceByIdBlock.Input, + output_schema=TwitterGetSpaceByIdBlock.Output, + test_input={ + "space_id": "1DXxyRYNejbKM", + "credentials": TEST_CREDENTIALS_INPUT, + "expansions": None, + "space_fields": None, + "user_fields": None, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("id", "1DXxyRYNejbKM"), + ("title", "Test Space"), + ("host_ids", ["1234567"]), + ( + "data", + { + "id": "1DXxyRYNejbKM", + "title": "Test Space", + "host_ids": ["1234567"], + }, + ), + ], + test_mock={ + "get_space": lambda *args, **kwargs: ( + { + "id": "1DXxyRYNejbKM", + "title": "Test Space", + "host_ids": ["1234567"], + }, + {}, + ) + }, + ) + + @staticmethod + def get_space( + credentials: TwitterCredentials, + space_id: str, + expansions: SpaceExpansionsFilter | None, + space_fields: SpaceFieldsFilter | None, + user_fields: TweetUserFieldsFilter | None, + ): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + params = { + "id": space_id, + } + + params = ( + SpaceExpansionsBuilder(params) + .add_expansions(expansions) + .add_space_fields(space_fields) + .add_user_fields(user_fields) + .build() + ) + + response = cast(Response, client.get_space(**params)) + + includes = {} + if response.includes: + for key, value in response.includes.items(): + if isinstance(value, list): + includes[key] = [ + item.data if hasattr(item, "data") else item + for item in value + ] + else: + includes[key] = value.data if hasattr(value, "data") else value + + data = {} + if response.data: + for key, value in response.data.items(): + if isinstance(value, list): + data[key] = [ + item.data if hasattr(item, "data") else item + for item in value + ] + else: + data[key] = value.data if hasattr(value, "data") else value + + return data, includes + + raise Exception("Space not found") + + except tweepy.TweepyException: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + space_data, includes = self.get_space( + credentials, + input_data.space_id, + input_data.expansions, + input_data.space_fields, + input_data.user_fields, + ) + + # Common outputs + if space_data: + if "id" in space_data: + yield "id", space_data.get("id") + + if "title" in space_data: + yield "title", space_data.get("title") + + if "host_ids" in space_data: + yield "host_ids", space_data.get("host_ids") + + if space_data: + yield "data", space_data + if includes: + yield "includes", includes + + except Exception 
as e: + yield "error", handle_tweepy_exception(e) + + +# Not tested yet, might have some problem +class TwitterGetSpaceBuyersBlock(Block): + """ + Gets list of users who purchased a ticket to the requested Space + """ + + class Input(UserExpansionInputs): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["spaces.read", "users.read", "offline.access"] + ) + + space_id: str = SchemaField( + description="Space ID to lookup buyers for", + placeholder="Enter Space ID", + required=True, + ) + + class Output(BlockSchema): + # Common outputs + buyer_ids: list[str] = SchemaField(description="List of buyer IDs") + usernames: list[str] = SchemaField(description="List of buyer usernames") + + # Complete outputs for advanced use + data: list[dict] = SchemaField(description="Complete space buyers data") + includes: dict = SchemaField( + description="Additional data requested via expansions" + ) + error: str = SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="c1c121a8-a62f-11ef-8b0e-d7b85f96a46f", + description="This block retrieves a list of users who purchased tickets to a Twitter Space.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterGetSpaceBuyersBlock.Input, + output_schema=TwitterGetSpaceBuyersBlock.Output, + test_input={ + "space_id": "1DXxyRYNejbKM", + "credentials": TEST_CREDENTIALS_INPUT, + "expansions": None, + "user_fields": None, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("buyer_ids", ["2244994945"]), + ("usernames", ["testuser"]), + ( + "data", + [{"id": "2244994945", "username": "testuser", "name": "Test User"}], + ), + ], + test_mock={ + "get_space_buyers": lambda *args, **kwargs: ( + [{"id": "2244994945", "username": "testuser", "name": "Test User"}], + {}, + ["2244994945"], + ["testuser"], + ) + }, + ) + + @staticmethod + def get_space_buyers( + credentials: TwitterCredentials, + space_id: str, + expansions: UserExpansionsFilter | None, + user_fields: TweetUserFieldsFilter | None, + ): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + params = { + "id": space_id, + } + + params = ( + UserExpansionsBuilder(params) + .add_expansions(expansions) + .add_user_fields(user_fields) + .build() + ) + + response = cast(Response, client.get_space_buyers(**params)) + + included = IncludesSerializer.serialize(response.includes) + + if response.data: + data = ResponseDataSerializer.serialize_list(response.data) + buyer_ids = [buyer["id"] for buyer in data] + usernames = [buyer["username"] for buyer in data] + + return data, included, buyer_ids, usernames + + raise Exception("No buyers found for this Space") + + except tweepy.TweepyException: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + buyers_data, included, buyer_ids, usernames = self.get_space_buyers( + credentials, + input_data.space_id, + input_data.expansions, + input_data.user_fields, + ) + + if buyer_ids: + yield "buyer_ids", buyer_ids + if usernames: + yield "usernames", usernames + + if buyers_data: + yield "data", buyers_data + if included: + yield "includes", included + + except Exception as e: + yield "error", handle_tweepy_exception(e) + + +class TwitterGetSpaceTweetsBlock(Block): + """ + Gets list of Tweets shared in the requested Space + """ + + class Input(TweetExpansionInputs): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["spaces.read", "users.read", 
"offline.access"] + ) + + space_id: str = SchemaField( + description="Space ID to lookup tweets for", + placeholder="Enter Space ID", + required=True, + ) + + class Output(BlockSchema): + # Common outputs + tweet_ids: list[str] = SchemaField(description="List of tweet IDs") + texts: list[str] = SchemaField(description="List of tweet texts") + + # Complete outputs for advanced use + data: list[dict] = SchemaField(description="Complete space tweets data") + includes: dict = SchemaField( + description="Additional data requested via expansions" + ) + meta: dict = SchemaField(description="Response metadata") + error: str = SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="b69731e6-a62f-11ef-b2d4-1bf14dd6aee4", + description="This block retrieves tweets shared in a Twitter Space.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterGetSpaceTweetsBlock.Input, + output_schema=TwitterGetSpaceTweetsBlock.Output, + test_input={ + "space_id": "1DXxyRYNejbKM", + "credentials": TEST_CREDENTIALS_INPUT, + "expansions": None, + "media_fields": None, + "place_fields": None, + "poll_fields": None, + "tweet_fields": None, + "user_fields": None, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("tweet_ids", ["1234567890"]), + ("texts", ["Test tweet"]), + ("data", [{"id": "1234567890", "text": "Test tweet"}]), + ], + test_mock={ + "get_space_tweets": lambda *args, **kwargs: ( + [{"id": "1234567890", "text": "Test tweet"}], # data + {}, + ["1234567890"], + ["Test tweet"], + {}, + ) + }, + ) + + @staticmethod + def get_space_tweets( + credentials: TwitterCredentials, + space_id: str, + expansions: ExpansionFilter | None, + media_fields: TweetMediaFieldsFilter | None, + place_fields: TweetPlaceFieldsFilter | None, + poll_fields: TweetPollFieldsFilter | None, + tweet_fields: TweetFieldsFilter | None, + user_fields: TweetUserFieldsFilter | None, + ): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + params = { + "id": space_id, + } + + params = ( + TweetExpansionsBuilder(params) + .add_expansions(expansions) + .add_media_fields(media_fields) + .add_place_fields(place_fields) + .add_poll_fields(poll_fields) + .add_tweet_fields(tweet_fields) + .add_user_fields(user_fields) + .build() + ) + + response = cast(Response, client.get_space_tweets(**params)) + + included = IncludesSerializer.serialize(response.includes) + + if response.data: + data = ResponseDataSerializer.serialize_list(response.data) + tweet_ids = [str(tweet["id"]) for tweet in data] + texts = [tweet["text"] for tweet in data] + + meta = response.meta or {} + + return data, included, tweet_ids, texts, meta + + raise Exception("No tweets found for this Space") + + except tweepy.TweepyException: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + tweets_data, included, tweet_ids, texts, meta = self.get_space_tweets( + credentials, + input_data.space_id, + input_data.expansions, + input_data.media_fields, + input_data.place_fields, + input_data.poll_fields, + input_data.tweet_fields, + input_data.user_fields, + ) + + if tweet_ids: + yield "tweet_ids", tweet_ids + if texts: + yield "texts", texts + + if tweets_data: + yield "data", tweets_data + if included: + yield "includes", included + if meta: + yield "meta", meta + + except Exception as e: + yield "error", handle_tweepy_exception(e) diff --git 
a/autogpt_platform/backend/backend/blocks/twitter/tweepy_exceptions.py b/autogpt_platform/backend/backend/blocks/twitter/tweepy_exceptions.py new file mode 100644 index 0000000000..c190026998 --- /dev/null +++ b/autogpt_platform/backend/backend/blocks/twitter/tweepy_exceptions.py @@ -0,0 +1,20 @@ +import tweepy + + +def handle_tweepy_exception(e: Exception) -> str: + if isinstance(e, tweepy.BadRequest): + return f"Bad Request (400): {str(e)}" + elif isinstance(e, tweepy.Unauthorized): + return f"Unauthorized (401): {str(e)}" + elif isinstance(e, tweepy.Forbidden): + return f"Forbidden (403): {str(e)}" + elif isinstance(e, tweepy.NotFound): + return f"Not Found (404): {str(e)}" + elif isinstance(e, tweepy.TooManyRequests): + return f"Too Many Requests (429): {str(e)}" + elif isinstance(e, tweepy.TwitterServerError): + return f"Twitter Server Error (5xx): {str(e)}" + elif isinstance(e, tweepy.TweepyException): + return f"Tweepy Error: {str(e)}" + else: + return f"Unexpected error: {str(e)}" diff --git a/autogpt_platform/backend/backend/blocks/twitter/tweets/bookmark.py b/autogpt_platform/backend/backend/blocks/twitter/tweets/bookmark.py new file mode 100644 index 0000000000..9a3147903c --- /dev/null +++ b/autogpt_platform/backend/backend/blocks/twitter/tweets/bookmark.py @@ -0,0 +1,372 @@ +from typing import cast + +import tweepy +from tweepy.client import Response + +from backend.blocks.twitter._auth import ( + TEST_CREDENTIALS, + TEST_CREDENTIALS_INPUT, + TwitterCredentials, + TwitterCredentialsField, + TwitterCredentialsInput, +) +from backend.blocks.twitter._builders import TweetExpansionsBuilder +from backend.blocks.twitter._serializer import ( + IncludesSerializer, + ResponseDataSerializer, +) +from backend.blocks.twitter._types import ( + ExpansionFilter, + TweetExpansionInputs, + TweetFieldsFilter, + TweetMediaFieldsFilter, + TweetPlaceFieldsFilter, + TweetPollFieldsFilter, + TweetUserFieldsFilter, +) +from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception +from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema +from backend.data.model import SchemaField + + +class TwitterBookmarkTweetBlock(Block): + """ + Bookmark a tweet on Twitter + """ + + class Input(BlockSchema): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["tweet.read", "bookmark.write", "users.read", "offline.access"] + ) + + tweet_id: str = SchemaField( + description="ID of the tweet to bookmark", + placeholder="Enter tweet ID", + ) + + class Output(BlockSchema): + success: bool = SchemaField(description="Whether the bookmark was successful") + error: str = SchemaField(description="Error message if the bookmark failed") + + def __init__(self): + super().__init__( + id="f33d67be-a62f-11ef-a797-ff83ec29ee8e", + description="This block bookmarks a tweet on Twitter.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterBookmarkTweetBlock.Input, + output_schema=TwitterBookmarkTweetBlock.Output, + test_input={ + "tweet_id": "1234567890", + "credentials": TEST_CREDENTIALS_INPUT, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("success", True), + ], + test_mock={"bookmark_tweet": lambda *args, **kwargs: True}, + ) + + @staticmethod + def bookmark_tweet( + credentials: TwitterCredentials, + tweet_id: str, + ): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + client.bookmark(tweet_id) + + return True + + except tweepy.TweepyException: + raise + + def run( + self, + input_data: Input, + *, + 
credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + success = self.bookmark_tweet(credentials, input_data.tweet_id) + yield "success", success + except Exception as e: + yield "error", handle_tweepy_exception(e) + + +class TwitterGetBookmarkedTweetsBlock(Block): + """ + Get All your bookmarked tweets from Twitter + """ + + class Input(TweetExpansionInputs): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["tweet.read", "bookmark.read", "users.read", "offline.access"] + ) + + max_results: int | None = SchemaField( + description="Maximum number of results to return (1-100)", + placeholder="Enter max results", + default=10, + advanced=True, + ) + + pagination_token: str | None = SchemaField( + description="Token for pagination", + placeholder="Enter pagination token", + default="", + advanced=True, + ) + + class Output(BlockSchema): + # Common Outputs that user commonly uses + id: list[str] = SchemaField(description="All Tweet IDs") + text: list[str] = SchemaField(description="All Tweet texts") + userId: list[str] = SchemaField(description="IDs of the tweet authors") + userName: list[str] = SchemaField(description="Usernames of the tweet authors") + + # Complete Outputs for advanced use + data: list[dict] = SchemaField(description="Complete Tweet data") + included: dict = SchemaField( + description="Additional data that you have requested (Optional) via Expansions field" + ) + meta: dict = SchemaField( + description="Provides metadata such as pagination info (next_token) or result counts" + ) + next_token: str = SchemaField(description="Next token for pagination") + + error: str = SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="ed26783e-a62f-11ef-9a21-c77c57dd8a1f", + description="This block retrieves bookmarked tweets from Twitter.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterGetBookmarkedTweetsBlock.Input, + output_schema=TwitterGetBookmarkedTweetsBlock.Output, + test_input={ + "max_results": 2, + "pagination_token": None, + "expansions": None, + "media_fields": None, + "place_fields": None, + "poll_fields": None, + "tweet_fields": None, + "user_fields": None, + "credentials": TEST_CREDENTIALS_INPUT, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("id", ["1234567890"]), + ("text", ["Test tweet"]), + ("userId", ["12345"]), + ("userName", ["testuser"]), + ("data", [{"id": "1234567890", "text": "Test tweet"}]), + ], + test_mock={ + "get_bookmarked_tweets": lambda *args, **kwargs: ( + ["1234567890"], + ["Test tweet"], + ["12345"], + ["testuser"], + [{"id": "1234567890", "text": "Test tweet"}], + {}, + {}, + None, + ) + }, + ) + + @staticmethod + def get_bookmarked_tweets( + credentials: TwitterCredentials, + max_results: int | None, + pagination_token: str | None, + expansions: ExpansionFilter | None, + media_fields: TweetMediaFieldsFilter | None, + place_fields: TweetPlaceFieldsFilter | None, + poll_fields: TweetPollFieldsFilter | None, + tweet_fields: TweetFieldsFilter | None, + user_fields: TweetUserFieldsFilter | None, + ): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + params = { + "max_results": max_results, + "pagination_token": ( + None if pagination_token == "" else pagination_token + ), + } + + params = ( + TweetExpansionsBuilder(params) + .add_expansions(expansions) + .add_media_fields(media_fields) + .add_place_fields(place_fields) + .add_poll_fields(poll_fields) + .add_tweet_fields(tweet_fields) 
+ .add_user_fields(user_fields) + .build() + ) + + response = cast( + Response, + client.get_bookmarks(**params), + ) + + meta = {} + tweet_ids = [] + tweet_texts = [] + user_ids = [] + user_names = [] + next_token = None + + if response.meta: + meta = response.meta + next_token = meta.get("next_token") + + included = IncludesSerializer.serialize(response.includes) + data = ResponseDataSerializer.serialize_list(response.data) + + if response.data: + tweet_ids = [str(tweet.id) for tweet in response.data] + tweet_texts = [tweet.text for tweet in response.data] + + if "users" in included: + for user in included["users"]: + user_ids.append(str(user["id"])) + user_names.append(user["username"]) + + return ( + tweet_ids, + tweet_texts, + user_ids, + user_names, + data, + included, + meta, + next_token, + ) + + raise Exception("No bookmarked tweets found") + + except tweepy.TweepyException: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + ids, texts, user_ids, user_names, data, included, meta, next_token = ( + self.get_bookmarked_tweets( + credentials, + input_data.max_results, + input_data.pagination_token, + input_data.expansions, + input_data.media_fields, + input_data.place_fields, + input_data.poll_fields, + input_data.tweet_fields, + input_data.user_fields, + ) + ) + if ids: + yield "id", ids + if texts: + yield "text", texts + if user_ids: + yield "userId", user_ids + if user_names: + yield "userName", user_names + if data: + yield "data", data + if included: + yield "included", included + if meta: + yield "meta", meta + if next_token: + yield "next_token", next_token + except Exception as e: + yield "error", handle_tweepy_exception(e) + + +class TwitterRemoveBookmarkTweetBlock(Block): + """ + Remove a bookmark for a tweet on Twitter + """ + + class Input(BlockSchema): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["tweet.read", "bookmark.write", "users.read", "offline.access"] + ) + + tweet_id: str = SchemaField( + description="ID of the tweet to remove bookmark from", + placeholder="Enter tweet ID", + ) + + class Output(BlockSchema): + success: bool = SchemaField( + description="Whether the bookmark was successfully removed" + ) + error: str = SchemaField( + description="Error message if the bookmark removal failed" + ) + + def __init__(self): + super().__init__( + id="e4100684-a62f-11ef-9be9-770cb41a2616", + description="This block removes a bookmark from a tweet on Twitter.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterRemoveBookmarkTweetBlock.Input, + output_schema=TwitterRemoveBookmarkTweetBlock.Output, + test_input={ + "tweet_id": "1234567890", + "credentials": TEST_CREDENTIALS_INPUT, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("success", True), + ], + test_mock={"remove_bookmark_tweet": lambda *args, **kwargs: True}, + ) + + @staticmethod + def remove_bookmark_tweet( + credentials: TwitterCredentials, + tweet_id: str, + ): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + client.remove_bookmark(tweet_id) + + return True + + except tweepy.TweepyException: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + success = self.remove_bookmark_tweet(credentials, input_data.tweet_id) + yield "success", success + except Exception as e: + yield "error", handle_tweepy_exception(e) diff --git 
a/autogpt_platform/backend/backend/blocks/twitter/tweets/hide.py b/autogpt_platform/backend/backend/blocks/twitter/tweets/hide.py new file mode 100644 index 0000000000..ae9998e32d --- /dev/null +++ b/autogpt_platform/backend/backend/blocks/twitter/tweets/hide.py @@ -0,0 +1,154 @@ +import tweepy + +from backend.blocks.twitter._auth import ( + TEST_CREDENTIALS, + TEST_CREDENTIALS_INPUT, + TwitterCredentials, + TwitterCredentialsField, + TwitterCredentialsInput, +) +from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception +from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema +from backend.data.model import SchemaField + + +class TwitterHideReplyBlock(Block): + """ + Hides a reply of one of your tweets + """ + + class Input(BlockSchema): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["tweet.read", "tweet.moderate.write", "users.read", "offline.access"] + ) + + tweet_id: str = SchemaField( + description="ID of the tweet reply to hide", + placeholder="Enter tweet ID", + ) + + class Output(BlockSchema): + success: bool = SchemaField(description="Whether the operation was successful") + error: str = SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="07d58b3e-a630-11ef-a030-93701d1a465e", + description="This block hides a reply to a tweet.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterHideReplyBlock.Input, + output_schema=TwitterHideReplyBlock.Output, + test_input={ + "tweet_id": "1234567890", + "credentials": TEST_CREDENTIALS_INPUT, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("success", True), + ], + test_mock={"hide_reply": lambda *args, **kwargs: True}, + ) + + @staticmethod + def hide_reply( + credentials: TwitterCredentials, + tweet_id: str, + ): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + client.hide_reply(id=tweet_id, user_auth=False) + + return True + + except tweepy.TweepyException: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + success = self.hide_reply( + credentials, + input_data.tweet_id, + ) + yield "success", success + except Exception as e: + yield "error", handle_tweepy_exception(e) + + +class TwitterUnhideReplyBlock(Block): + """ + Unhides a reply to a tweet + """ + + class Input(BlockSchema): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["tweet.read", "tweet.moderate.write", "users.read", "offline.access"] + ) + + tweet_id: str = SchemaField( + description="ID of the tweet reply to unhide", + placeholder="Enter tweet ID", + ) + + class Output(BlockSchema): + success: bool = SchemaField(description="Whether the operation was successful") + error: str = SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="fcf9e4e4-a62f-11ef-9d85-57d3d06b616a", + description="This block unhides a reply to a tweet.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterUnhideReplyBlock.Input, + output_schema=TwitterUnhideReplyBlock.Output, + test_input={ + "tweet_id": "1234567890", + "credentials": TEST_CREDENTIALS_INPUT, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("success", True), + ], + test_mock={"unhide_reply": lambda *args, **kwargs: True}, + ) + + @staticmethod + def unhide_reply( + credentials: TwitterCredentials, + tweet_id: str, + ): + try: + client = tweepy.Client( + 
bearer_token=credentials.access_token.get_secret_value() + ) + + client.unhide_reply(id=tweet_id, user_auth=False) + + return True + + except tweepy.TweepyException: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + success = self.unhide_reply( + credentials, + input_data.tweet_id, + ) + yield "success", success + except Exception as e: + yield "error", handle_tweepy_exception(e) diff --git a/autogpt_platform/backend/backend/blocks/twitter/tweets/like.py b/autogpt_platform/backend/backend/blocks/twitter/tweets/like.py new file mode 100644 index 0000000000..c1816a6ce3 --- /dev/null +++ b/autogpt_platform/backend/backend/blocks/twitter/tweets/like.py @@ -0,0 +1,576 @@ +from typing import cast + +import tweepy +from tweepy.client import Response + +from backend.blocks.twitter._auth import ( + TEST_CREDENTIALS, + TEST_CREDENTIALS_INPUT, + TwitterCredentials, + TwitterCredentialsField, + TwitterCredentialsInput, +) +from backend.blocks.twitter._builders import ( + TweetExpansionsBuilder, + UserExpansionsBuilder, +) +from backend.blocks.twitter._serializer import ( + IncludesSerializer, + ResponseDataSerializer, +) +from backend.blocks.twitter._types import ( + ExpansionFilter, + TweetExpansionInputs, + TweetFieldsFilter, + TweetMediaFieldsFilter, + TweetPlaceFieldsFilter, + TweetPollFieldsFilter, + TweetUserFieldsFilter, + UserExpansionInputs, + UserExpansionsFilter, +) +from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception +from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema +from backend.data.model import SchemaField + + +class TwitterLikeTweetBlock(Block): + """ + Likes a tweet + """ + + class Input(BlockSchema): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["tweet.read", "like.write", "users.read", "offline.access"] + ) + + tweet_id: str = SchemaField( + description="ID of the tweet to like", + placeholder="Enter tweet ID", + ) + + class Output(BlockSchema): + success: bool = SchemaField(description="Whether the operation was successful") + error: str = SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="4d0b4c5c-a630-11ef-8e08-1b14c507b347", + description="This block likes a tweet.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterLikeTweetBlock.Input, + output_schema=TwitterLikeTweetBlock.Output, + test_input={ + "tweet_id": "1234567890", + "credentials": TEST_CREDENTIALS_INPUT, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("success", True), + ], + test_mock={"like_tweet": lambda *args, **kwargs: True}, + ) + + @staticmethod + def like_tweet( + credentials: TwitterCredentials, + tweet_id: str, + ): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + client.like(tweet_id=tweet_id, user_auth=False) + + return True + + except tweepy.TweepyException: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + success = self.like_tweet( + credentials, + input_data.tweet_id, + ) + yield "success", success + except Exception as e: + yield "error", handle_tweepy_exception(e) + + +class TwitterGetLikingUsersBlock(Block): + """ + Gets information about users who liked a one of your tweet + """ + + class Input(UserExpansionInputs): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["tweet.read", "users.read", "like.read", 
"offline.access"] + ) + + tweet_id: str = SchemaField( + description="ID of the tweet to get liking users for", + placeholder="Enter tweet ID", + ) + max_results: int | None = SchemaField( + description="Maximum number of results to return (1-100)", + placeholder="Enter max results", + default=10, + advanced=True, + ) + pagination_token: str | None = SchemaField( + description="Token for getting next/previous page of results", + placeholder="Enter pagination token", + default="", + advanced=True, + ) + + class Output(BlockSchema): + # Common Outputs that user commonly uses + id: list[str] = SchemaField(description="All User IDs who liked the tweet") + username: list[str] = SchemaField( + description="All User usernames who liked the tweet" + ) + next_token: str = SchemaField(description="Next token for pagination") + + # Complete Outputs for advanced use + data: list[dict] = SchemaField(description="Complete Tweet data") + included: dict = SchemaField( + description="Additional data that you have requested (Optional) via Expansions field" + ) + meta: dict = SchemaField( + description="Provides metadata such as pagination info (next_token) or result counts" + ) + + # error + error: str = SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="34275000-a630-11ef-b01e-5f00d9077c08", + description="This block gets information about users who liked a tweet.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterGetLikingUsersBlock.Input, + output_schema=TwitterGetLikingUsersBlock.Output, + test_input={ + "tweet_id": "1234567890", + "max_results": 1, + "pagination_token": None, + "credentials": TEST_CREDENTIALS_INPUT, + "expansions": None, + "tweet_fields": None, + "user_fields": None, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("id", ["1234567890"]), + ("username", ["testuser"]), + ("data", [{"id": "1234567890", "username": "testuser"}]), + ], + test_mock={ + "get_liking_users": lambda *args, **kwargs: ( + ["1234567890"], + ["testuser"], + [{"id": "1234567890", "username": "testuser"}], + {}, + {}, + None, + ) + }, + ) + + @staticmethod + def get_liking_users( + credentials: TwitterCredentials, + tweet_id: str, + max_results: int | None, + pagination_token: str | None, + expansions: UserExpansionsFilter | None, + tweet_fields: TweetFieldsFilter | None, + user_fields: TweetUserFieldsFilter | None, + ): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + params = { + "id": tweet_id, + "max_results": max_results, + "pagination_token": ( + None if pagination_token == "" else pagination_token + ), + "user_auth": False, + } + + params = ( + UserExpansionsBuilder(params) + .add_expansions(expansions) + .add_tweet_fields(tweet_fields) + .add_user_fields(user_fields) + .build() + ) + + response = cast(Response, client.get_liking_users(**params)) + + if not response.data and not response.meta: + raise Exception("No liking users found") + + meta = {} + user_ids = [] + usernames = [] + next_token = None + + if response.meta: + meta = response.meta + next_token = meta.get("next_token") + + included = IncludesSerializer.serialize(response.includes) + data = ResponseDataSerializer.serialize_list(response.data) + + if response.data: + user_ids = [str(user.id) for user in response.data] + usernames = [user.username for user in response.data] + + return user_ids, usernames, data, included, meta, next_token + + raise Exception("No liking users found") + + except tweepy.TweepyException: + 
raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + ids, usernames, data, included, meta, next_token = self.get_liking_users( + credentials, + input_data.tweet_id, + input_data.max_results, + input_data.pagination_token, + input_data.expansions, + input_data.tweet_fields, + input_data.user_fields, + ) + if ids: + yield "id", ids + if usernames: + yield "username", usernames + if next_token: + yield "next_token", next_token + if data: + yield "data", data + if included: + yield "included", included + if meta: + yield "meta", meta + except Exception as e: + yield "error", handle_tweepy_exception(e) + + +class TwitterGetLikedTweetsBlock(Block): + """ + Gets information about tweets liked by you + """ + + class Input(TweetExpansionInputs): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["tweet.read", "users.read", "like.read", "offline.access"] + ) + + user_id: str = SchemaField( + description="ID of the user to get liked tweets for", + placeholder="Enter user ID", + ) + max_results: int | None = SchemaField( + description="Maximum number of results to return (5-100)", + placeholder="100", + default=10, + advanced=True, + ) + pagination_token: str | None = SchemaField( + description="Token for getting next/previous page of results", + placeholder="Enter pagination token", + default="", + advanced=True, + ) + + class Output(BlockSchema): + # Common Outputs that user commonly uses + ids: list[str] = SchemaField(description="All Tweet IDs") + texts: list[str] = SchemaField(description="All Tweet texts") + userIds: list[str] = SchemaField( + description="List of user ids that authored the tweets" + ) + userNames: list[str] = SchemaField( + description="List of user names that authored the tweets" + ) + next_token: str = SchemaField(description="Next token for pagination") + + # Complete Outputs for advanced use + data: list[dict] = SchemaField(description="Complete Tweet data") + included: dict = SchemaField( + description="Additional data that you have requested (Optional) via Expansions field" + ) + meta: dict = SchemaField( + description="Provides metadata such as pagination info (next_token) or result counts" + ) + + # error + error: str = SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="292e7c78-a630-11ef-9f40-df5dffaca106", + description="This block gets information about tweets liked by a user.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterGetLikedTweetsBlock.Input, + output_schema=TwitterGetLikedTweetsBlock.Output, + test_input={ + "user_id": "1234567890", + "max_results": 2, + "pagination_token": None, + "credentials": TEST_CREDENTIALS_INPUT, + "expansions": None, + "media_fields": None, + "place_fields": None, + "poll_fields": None, + "tweet_fields": None, + "user_fields": None, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("ids", ["12345", "67890"]), + ("texts", ["Tweet 1", "Tweet 2"]), + ("userIds", ["67890", "67891"]), + ("userNames", ["testuser1", "testuser2"]), + ( + "data", + [ + {"id": "12345", "text": "Tweet 1"}, + {"id": "67890", "text": "Tweet 2"}, + ], + ), + ], + test_mock={ + "get_liked_tweets": lambda *args, **kwargs: ( + ["12345", "67890"], + ["Tweet 1", "Tweet 2"], + ["67890", "67891"], + ["testuser1", "testuser2"], + [ + {"id": "12345", "text": "Tweet 1"}, + {"id": "67890", "text": "Tweet 2"}, + ], + {}, + {}, + None, + ) + }, + ) + + @staticmethod + def get_liked_tweets( + 
credentials: TwitterCredentials, + user_id: str, + max_results: int | None, + pagination_token: str | None, + expansions: ExpansionFilter | None, + media_fields: TweetMediaFieldsFilter | None, + place_fields: TweetPlaceFieldsFilter | None, + poll_fields: TweetPollFieldsFilter | None, + tweet_fields: TweetFieldsFilter | None, + user_fields: TweetUserFieldsFilter | None, + ): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + params = { + "id": user_id, + "max_results": max_results, + "pagination_token": ( + None if pagination_token == "" else pagination_token + ), + "user_auth": False, + } + + params = ( + TweetExpansionsBuilder(params) + .add_expansions(expansions) + .add_media_fields(media_fields) + .add_place_fields(place_fields) + .add_poll_fields(poll_fields) + .add_tweet_fields(tweet_fields) + .add_user_fields(user_fields) + .build() + ) + + response = cast(Response, client.get_liked_tweets(**params)) + + if not response.data and not response.meta: + raise Exception("No liked tweets found") + + meta = {} + tweet_ids = [] + tweet_texts = [] + user_ids = [] + user_names = [] + next_token = None + + if response.meta: + meta = response.meta + next_token = meta.get("next_token") + + included = IncludesSerializer.serialize(response.includes) + data = ResponseDataSerializer.serialize_list(response.data) + + if response.data: + tweet_ids = [str(tweet.id) for tweet in response.data] + tweet_texts = [tweet.text for tweet in response.data] + + if "users" in response.includes: + user_ids = [str(user["id"]) for user in response.includes["users"]] + user_names = [ + user["username"] for user in response.includes["users"] + ] + + return ( + tweet_ids, + tweet_texts, + user_ids, + user_names, + data, + included, + meta, + next_token, + ) + + raise Exception("No liked tweets found") + + except tweepy.TweepyException: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + ids, texts, user_ids, user_names, data, included, meta, next_token = ( + self.get_liked_tweets( + credentials, + input_data.user_id, + input_data.max_results, + input_data.pagination_token, + input_data.expansions, + input_data.media_fields, + input_data.place_fields, + input_data.poll_fields, + input_data.tweet_fields, + input_data.user_fields, + ) + ) + if ids: + yield "ids", ids + if texts: + yield "texts", texts + if user_ids: + yield "userIds", user_ids + if user_names: + yield "userNames", user_names + if next_token: + yield "next_token", next_token + if data: + yield "data", data + if included: + yield "included", included + if meta: + yield "meta", meta + except Exception as e: + yield "error", handle_tweepy_exception(e) + + +class TwitterUnlikeTweetBlock(Block): + """ + Unlikes a tweet that was previously liked + """ + + class Input(BlockSchema): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["tweet.read", "like.write", "users.read", "offline.access"] + ) + + tweet_id: str = SchemaField( + description="ID of the tweet to unlike", + placeholder="Enter tweet ID", + ) + + class Output(BlockSchema): + success: bool = SchemaField(description="Whether the operation was successful") + error: str = SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="1ed5eab8-a630-11ef-8e21-cbbbc80cbb85", + description="This block unlikes a tweet.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterUnlikeTweetBlock.Input, + 
output_schema=TwitterUnlikeTweetBlock.Output, + test_input={ + "tweet_id": "1234567890", + "credentials": TEST_CREDENTIALS_INPUT, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("success", True), + ], + test_mock={"unlike_tweet": lambda *args, **kwargs: True}, + ) + + @staticmethod + def unlike_tweet( + credentials: TwitterCredentials, + tweet_id: str, + ): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + client.unlike(tweet_id=tweet_id, user_auth=False) + + return True + + except tweepy.TweepyException: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + success = self.unlike_tweet( + credentials, + input_data.tweet_id, + ) + yield "success", success + except Exception as e: + yield "error", handle_tweepy_exception(e) diff --git a/autogpt_platform/backend/backend/blocks/twitter/tweets/manage.py b/autogpt_platform/backend/backend/blocks/twitter/tweets/manage.py new file mode 100644 index 0000000000..7c86c0abcd --- /dev/null +++ b/autogpt_platform/backend/backend/blocks/twitter/tweets/manage.py @@ -0,0 +1,545 @@ +from datetime import datetime +from typing import List, Literal, Optional, Union, cast + +import tweepy +from pydantic import BaseModel +from tweepy.client import Response + +from backend.blocks.twitter._auth import ( + TEST_CREDENTIALS, + TEST_CREDENTIALS_INPUT, + TwitterCredentials, + TwitterCredentialsField, + TwitterCredentialsInput, +) +from backend.blocks.twitter._builders import ( + TweetDurationBuilder, + TweetExpansionsBuilder, + TweetPostBuilder, + TweetSearchBuilder, +) +from backend.blocks.twitter._serializer import ( + IncludesSerializer, + ResponseDataSerializer, +) +from backend.blocks.twitter._types import ( + ExpansionFilter, + TweetExpansionInputs, + TweetFieldsFilter, + TweetMediaFieldsFilter, + TweetPlaceFieldsFilter, + TweetPollFieldsFilter, + TweetReplySettingsFilter, + TweetTimeWindowInputs, + TweetUserFieldsFilter, +) +from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception +from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema +from backend.data.model import SchemaField + + +class Media(BaseModel): + discriminator: Literal["media"] + media_ids: Optional[List[str]] = None + media_tagged_user_ids: Optional[List[str]] = None + + +class DeepLink(BaseModel): + discriminator: Literal["deep_link"] + direct_message_deep_link: Optional[str] = None + + +class Poll(BaseModel): + discriminator: Literal["poll"] + poll_options: Optional[List[str]] = None + poll_duration_minutes: Optional[int] = None + + +class Place(BaseModel): + discriminator: Literal["place"] + place_id: Optional[str] = None + + +class Quote(BaseModel): + discriminator: Literal["quote"] + quote_tweet_id: Optional[str] = None + + +class TwitterPostTweetBlock(Block): + """ + Create a tweet on Twitter with the option to include one additional element such as a media, quote, or deep link. 
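+    Only one attachment can be supplied per tweet; its `discriminator` field selects
+    whether it is treated as media, a deep link, a poll, a place or a quote.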
+ """ + + class Input(BlockSchema): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["tweet.read", "tweet.write", "users.read", "offline.access"] + ) + + tweet_text: str | None = SchemaField( + description="Text of the tweet to post", + placeholder="Enter your tweet", + default=None, + advanced=False, + ) + + for_super_followers_only: bool = SchemaField( + description="Tweet exclusively for Super Followers", + placeholder="Enter for super followers only", + advanced=True, + default=False, + ) + + attachment: Union[Media, DeepLink, Poll, Place, Quote] | None = SchemaField( + discriminator="discriminator", + description="Additional tweet data (media, deep link, poll, place or quote)", + advanced=True, + ) + + exclude_reply_user_ids: Optional[List[str]] = SchemaField( + description="User IDs to exclude from reply Tweet thread. [ex - 6253282]", + placeholder="Enter user IDs to exclude", + advanced=True, + default=None, + ) + + in_reply_to_tweet_id: Optional[str] = SchemaField( + description="Tweet ID being replied to. Please note that in_reply_to_tweet_id needs to be in the request if exclude_reply_user_ids is present", + default=None, + placeholder="Enter in reply to tweet ID", + advanced=True, + ) + + reply_settings: TweetReplySettingsFilter = SchemaField( + description="Who can reply to the Tweet (mentionedUsers or following)", + placeholder="Enter reply settings", + advanced=True, + default=TweetReplySettingsFilter(All_Users=True), + ) + + class Output(BlockSchema): + tweet_id: str = SchemaField(description="ID of the created tweet") + tweet_url: str = SchemaField(description="URL to the tweet") + error: str = SchemaField( + description="Error message if the tweet posting failed" + ) + + def __init__(self): + super().__init__( + id="7bb0048a-a630-11ef-aeb8-abc0dadb9b12", + description="This block posts a tweet on Twitter.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterPostTweetBlock.Input, + output_schema=TwitterPostTweetBlock.Output, + test_input={ + "tweet_text": "This is a test tweet.", + "credentials": TEST_CREDENTIALS_INPUT, + "attachment": { + "discriminator": "deep_link", + "direct_message_deep_link": "https://twitter.com/messages/compose", + }, + "for_super_followers_only": False, + "exclude_reply_user_ids": [], + "in_reply_to_tweet_id": "", + }, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("tweet_id", "1234567890"), + ("tweet_url", "https://twitter.com/user/status/1234567890"), + ], + test_mock={ + "post_tweet": lambda *args, **kwargs: ( + "1234567890", + "https://twitter.com/user/status/1234567890", + ) + }, + ) + + def post_tweet( + self, + credentials: TwitterCredentials, + input_txt: str | None, + attachment: Union[Media, DeepLink, Poll, Place, Quote] | None, + for_super_followers_only: bool, + exclude_reply_user_ids: Optional[List[str]], + in_reply_to_tweet_id: Optional[str], + reply_settings: TweetReplySettingsFilter, + ): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + params = ( + TweetPostBuilder() + .add_text(input_txt) + .add_super_followers(for_super_followers_only) + .add_reply_settings( + exclude_reply_user_ids or [], + in_reply_to_tweet_id or "", + reply_settings, + ) + ) + + if isinstance(attachment, Media): + params.add_media( + attachment.media_ids or [], attachment.media_tagged_user_ids or [] + ) + elif isinstance(attachment, DeepLink): + params.add_deep_link(attachment.direct_message_deep_link or "") + elif isinstance(attachment, Poll): + 
params.add_poll_options(attachment.poll_options or []) + params.add_poll_duration(attachment.poll_duration_minutes or 0) + elif isinstance(attachment, Place): + params.add_place(attachment.place_id or "") + elif isinstance(attachment, Quote): + params.add_quote(attachment.quote_tweet_id or "") + + tweet = cast(Response, client.create_tweet(**params.build())) + + if not tweet.data: + raise Exception("Failed to create tweet") + + tweet_id = tweet.data["id"] + tweet_url = f"https://twitter.com/user/status/{tweet_id}" + return str(tweet_id), tweet_url + + except tweepy.TweepyException: + raise + except Exception: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + tweet_id, tweet_url = self.post_tweet( + credentials, + input_data.tweet_text, + input_data.attachment, + input_data.for_super_followers_only, + input_data.exclude_reply_user_ids, + input_data.in_reply_to_tweet_id, + input_data.reply_settings, + ) + yield "tweet_id", tweet_id + yield "tweet_url", tweet_url + + except Exception as e: + yield "error", handle_tweepy_exception(e) + + +class TwitterDeleteTweetBlock(Block): + """ + Deletes a tweet on Twitter using twitter Id + """ + + class Input(BlockSchema): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["tweet.read", "tweet.write", "users.read", "offline.access"] + ) + + tweet_id: str = SchemaField( + description="ID of the tweet to delete", + placeholder="Enter tweet ID", + ) + + class Output(BlockSchema): + success: bool = SchemaField( + description="Whether the tweet was successfully deleted" + ) + error: str = SchemaField( + description="Error message if the tweet deletion failed" + ) + + def __init__(self): + super().__init__( + id="761babf0-a630-11ef-a03d-abceb082f58f", + description="This block deletes a tweet on Twitter.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterDeleteTweetBlock.Input, + output_schema=TwitterDeleteTweetBlock.Output, + test_input={ + "tweet_id": "1234567890", + "credentials": TEST_CREDENTIALS_INPUT, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[("success", True)], + test_mock={"delete_tweet": lambda *args, **kwargs: True}, + ) + + @staticmethod + def delete_tweet(credentials: TwitterCredentials, tweet_id: str): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + client.delete_tweet(id=tweet_id, user_auth=False) + return True + except tweepy.TweepyException: + raise + except Exception: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + success = self.delete_tweet( + credentials, + input_data.tweet_id, + ) + yield "success", success + + except Exception as e: + yield "error", handle_tweepy_exception(e) + + +class TwitterSearchRecentTweetsBlock(Block): + """ + Searches all public Tweets in Twitter history + """ + + class Input(TweetExpansionInputs, TweetTimeWindowInputs): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["tweet.read", "users.read", "offline.access"] + ) + + query: str = SchemaField( + description="Search query (up to 1024 characters)", + placeholder="Enter search query", + ) + + max_results: int = SchemaField( + description="Maximum number of results per page (10-500)", + placeholder="Enter max results", + default=10, + advanced=True, + ) + + pagination: str | None = SchemaField( + description="Token for pagination", + default="", + placeholder="Enter pagination token", + 
advanced=True, + ) + + class Output(BlockSchema): + # Common Outputs that user commonly uses + tweet_ids: list[str] = SchemaField(description="All Tweet IDs") + tweet_texts: list[str] = SchemaField(description="All Tweet texts") + next_token: str = SchemaField(description="Next token for pagination") + + # Complete Outputs for advanced use + data: list[dict] = SchemaField(description="Complete Tweet data") + included: dict = SchemaField( + description="Additional data that you have requested (Optional) via Expansions field" + ) + meta: dict = SchemaField( + description="Provides metadata such as pagination info (next_token) or result counts" + ) + + # error + error: str = SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="53e5cf8e-a630-11ef-ba85-df6d666fa5d5", + description="This block searches all public Tweets in Twitter history.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterSearchRecentTweetsBlock.Input, + output_schema=TwitterSearchRecentTweetsBlock.Output, + test_input={ + "query": "from:twitterapi #twitterapi", + "credentials": TEST_CREDENTIALS_INPUT, + "max_results": 2, + "start_time": "2024-12-14T18:30:00.000Z", + "end_time": "2024-12-17T18:30:00.000Z", + "since_id": None, + "until_id": None, + "sort_order": None, + "pagination": None, + "expansions": None, + "media_fields": None, + "place_fields": None, + "poll_fields": None, + "tweet_fields": None, + "user_fields": None, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("tweet_ids", ["1373001119480344583", "1372627771717869568"]), + ( + "tweet_texts", + [ + "Looking to get started with the Twitter API but new to APIs in general?", + "Thanks to everyone who joined and made today a great session!", + ], + ), + ( + "data", + [ + { + "id": "1373001119480344583", + "text": "Looking to get started with the Twitter API but new to APIs in general?", + }, + { + "id": "1372627771717869568", + "text": "Thanks to everyone who joined and made today a great session!", + }, + ], + ), + ], + test_mock={ + "search_tweets": lambda *args, **kwargs: ( + ["1373001119480344583", "1372627771717869568"], + [ + "Looking to get started with the Twitter API but new to APIs in general?", + "Thanks to everyone who joined and made today a great session!", + ], + [ + { + "id": "1373001119480344583", + "text": "Looking to get started with the Twitter API but new to APIs in general?", + }, + { + "id": "1372627771717869568", + "text": "Thanks to everyone who joined and made today a great session!", + }, + ], + {}, + {}, + None, + ) + }, + ) + + @staticmethod + def search_tweets( + credentials: TwitterCredentials, + query: str, + max_results: int, + start_time: datetime | None, + end_time: datetime | None, + since_id: str | None, + until_id: str | None, + sort_order: str | None, + pagination: str | None, + expansions: ExpansionFilter | None, + media_fields: TweetMediaFieldsFilter | None, + place_fields: TweetPlaceFieldsFilter | None, + poll_fields: TweetPollFieldsFilter | None, + tweet_fields: TweetFieldsFilter | None, + user_fields: TweetUserFieldsFilter | None, + ): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + # Building common params + params = ( + TweetSearchBuilder() + .add_query(query) + .add_pagination(max_results, pagination) + .build() + ) + + # Adding expansions to params If required by the user + params = ( + TweetExpansionsBuilder(params) + .add_expansions(expansions) + .add_media_fields(media_fields) + 
.add_place_fields(place_fields) + .add_poll_fields(poll_fields) + .add_tweet_fields(tweet_fields) + .add_user_fields(user_fields) + .build() + ) + + # Adding time window to params If required by the user + params = ( + TweetDurationBuilder(params) + .add_start_time(start_time) + .add_end_time(end_time) + .add_since_id(since_id) + .add_until_id(until_id) + .add_sort_order(sort_order) + .build() + ) + + response = cast(Response, client.search_recent_tweets(**params)) + + if not response.data and not response.meta: + raise Exception("No tweets found") + + meta = {} + tweet_ids = [] + tweet_texts = [] + next_token = None + + if response.meta: + meta = response.meta + next_token = meta.get("next_token") + + included = IncludesSerializer.serialize(response.includes) + data = ResponseDataSerializer.serialize_list(response.data) + + if response.data: + tweet_ids = [str(tweet.id) for tweet in response.data] + tweet_texts = [tweet.text for tweet in response.data] + + return tweet_ids, tweet_texts, data, included, meta, next_token + + raise Exception("No tweets found") + + except tweepy.TweepyException: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + ids, texts, data, included, meta, next_token = self.search_tweets( + credentials, + input_data.query, + input_data.max_results, + input_data.start_time, + input_data.end_time, + input_data.since_id, + input_data.until_id, + input_data.sort_order, + input_data.pagination, + input_data.expansions, + input_data.media_fields, + input_data.place_fields, + input_data.poll_fields, + input_data.tweet_fields, + input_data.user_fields, + ) + if ids: + yield "tweet_ids", ids + if texts: + yield "tweet_texts", texts + if next_token: + yield "next_token", next_token + if data: + yield "data", data + if included: + yield "included", included + if meta: + yield "meta", meta + + except Exception as e: + yield "error", handle_tweepy_exception(e) diff --git a/autogpt_platform/backend/backend/blocks/twitter/tweets/quote.py b/autogpt_platform/backend/backend/blocks/twitter/tweets/quote.py new file mode 100644 index 0000000000..d46b5647c9 --- /dev/null +++ b/autogpt_platform/backend/backend/blocks/twitter/tweets/quote.py @@ -0,0 +1,222 @@ +from typing import cast + +import tweepy +from tweepy.client import Response + +from backend.blocks.twitter._auth import ( + TEST_CREDENTIALS, + TEST_CREDENTIALS_INPUT, + TwitterCredentials, + TwitterCredentialsField, + TwitterCredentialsInput, +) +from backend.blocks.twitter._builders import TweetExpansionsBuilder +from backend.blocks.twitter._serializer import ( + IncludesSerializer, + ResponseDataSerializer, +) +from backend.blocks.twitter._types import ( + ExpansionFilter, + TweetExcludesFilter, + TweetExpansionInputs, + TweetFieldsFilter, + TweetMediaFieldsFilter, + TweetPlaceFieldsFilter, + TweetPollFieldsFilter, + TweetUserFieldsFilter, +) +from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception +from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema +from backend.data.model import SchemaField + + +class TwitterGetQuoteTweetsBlock(Block): + """ + Gets quote tweets for a specified tweet ID + """ + + class Input(TweetExpansionInputs): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["tweet.read", "users.read", "offline.access"] + ) + + tweet_id: str = SchemaField( + description="ID of the tweet to get quotes for", + placeholder="Enter tweet ID", + ) + + max_results: int | None = 
SchemaField( + description="Number of results to return (max 100)", + default=10, + advanced=True, + ) + + exclude: TweetExcludesFilter | None = SchemaField( + description="Types of tweets to exclude", advanced=True, default=None + ) + + pagination_token: str | None = SchemaField( + description="Token for pagination", + advanced=True, + default="", + ) + + class Output(BlockSchema): + # Common Outputs that user commonly uses + ids: list = SchemaField(description="All Tweet IDs ") + texts: list = SchemaField(description="All Tweet texts") + next_token: str = SchemaField(description="Next token for pagination") + + # Complete Outputs for advanced use + data: list[dict] = SchemaField(description="Complete Tweet data") + included: dict = SchemaField( + description="Additional data that you have requested (Optional) via Expansions field" + ) + meta: dict = SchemaField( + description="Provides metadata such as pagination info (next_token) or result counts" + ) + + # error + error: str = SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="9fbdd208-a630-11ef-9b97-ab7a3a695ca3", + description="This block gets quote tweets for a specific tweet.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterGetQuoteTweetsBlock.Input, + output_schema=TwitterGetQuoteTweetsBlock.Output, + test_input={ + "tweet_id": "1234567890", + "max_results": 2, + "pagination_token": None, + "credentials": TEST_CREDENTIALS_INPUT, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("ids", ["12345", "67890"]), + ("texts", ["Tweet 1", "Tweet 2"]), + ( + "data", + [ + {"id": "12345", "text": "Tweet 1"}, + {"id": "67890", "text": "Tweet 2"}, + ], + ), + ], + test_mock={ + "get_quote_tweets": lambda *args, **kwargs: ( + ["12345", "67890"], + ["Tweet 1", "Tweet 2"], + [ + {"id": "12345", "text": "Tweet 1"}, + {"id": "67890", "text": "Tweet 2"}, + ], + {}, + {}, + None, + ) + }, + ) + + @staticmethod + def get_quote_tweets( + credentials: TwitterCredentials, + tweet_id: str, + max_results: int | None, + exclude: TweetExcludesFilter | None, + pagination_token: str | None, + expansions: ExpansionFilter | None, + media_fields: TweetMediaFieldsFilter | None, + place_fields: TweetPlaceFieldsFilter | None, + poll_fields: TweetPollFieldsFilter | None, + tweet_fields: TweetFieldsFilter | None, + user_fields: TweetUserFieldsFilter | None, + ): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + params = { + "id": tweet_id, + "max_results": max_results, + "pagination_token": ( + None if pagination_token == "" else pagination_token + ), + "exclude": None if exclude == TweetExcludesFilter() else exclude, + "user_auth": False, + } + + params = ( + TweetExpansionsBuilder(params) + .add_expansions(expansions) + .add_media_fields(media_fields) + .add_place_fields(place_fields) + .add_poll_fields(poll_fields) + .add_tweet_fields(tweet_fields) + .add_user_fields(user_fields) + .build() + ) + + response = cast(Response, client.get_quote_tweets(**params)) + + meta = {} + tweet_ids = [] + tweet_texts = [] + next_token = None + + if response.meta: + meta = response.meta + next_token = meta.get("next_token") + + included = IncludesSerializer.serialize(response.includes) + data = ResponseDataSerializer.serialize_list(response.data) + + if response.data: + tweet_ids = [str(tweet.id) for tweet in response.data] + tweet_texts = [tweet.text for tweet in response.data] + + return tweet_ids, tweet_texts, data, included, meta, next_token + + 
raise Exception("No quote tweets found") + + except tweepy.TweepyException: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + ids, texts, data, included, meta, next_token = self.get_quote_tweets( + credentials, + input_data.tweet_id, + input_data.max_results, + input_data.exclude, + input_data.pagination_token, + input_data.expansions, + input_data.media_fields, + input_data.place_fields, + input_data.poll_fields, + input_data.tweet_fields, + input_data.user_fields, + ) + if ids: + yield "ids", ids + if texts: + yield "texts", texts + if next_token: + yield "next_token", next_token + if data: + yield "data", data + if included: + yield "included", included + if meta: + yield "meta", meta + + except Exception as e: + yield "error", handle_tweepy_exception(e) diff --git a/autogpt_platform/backend/backend/blocks/twitter/tweets/retweet.py b/autogpt_platform/backend/backend/blocks/twitter/tweets/retweet.py new file mode 100644 index 0000000000..d46216c085 --- /dev/null +++ b/autogpt_platform/backend/backend/blocks/twitter/tweets/retweet.py @@ -0,0 +1,363 @@ +from typing import cast + +import tweepy +from tweepy.client import Response + +from backend.blocks.twitter._auth import ( + TEST_CREDENTIALS, + TEST_CREDENTIALS_INPUT, + TwitterCredentials, + TwitterCredentialsField, + TwitterCredentialsInput, +) +from backend.blocks.twitter._builders import UserExpansionsBuilder +from backend.blocks.twitter._serializer import ( + IncludesSerializer, + ResponseDataSerializer, +) +from backend.blocks.twitter._types import ( + TweetFieldsFilter, + TweetUserFieldsFilter, + UserExpansionInputs, + UserExpansionsFilter, +) +from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception +from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema +from backend.data.model import SchemaField + + +class TwitterRetweetBlock(Block): + """ + Retweets a tweet on Twitter + """ + + class Input(BlockSchema): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["tweet.read", "tweet.write", "users.read", "offline.access"] + ) + + tweet_id: str = SchemaField( + description="ID of the tweet to retweet", + placeholder="Enter tweet ID", + ) + + class Output(BlockSchema): + success: bool = SchemaField(description="Whether the retweet was successful") + error: str = SchemaField(description="Error message if the retweet failed") + + def __init__(self): + super().__init__( + id="bd7b8d3a-a630-11ef-be96-6f4aa4c3c4f4", + description="This block retweets a tweet on Twitter.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterRetweetBlock.Input, + output_schema=TwitterRetweetBlock.Output, + test_input={ + "tweet_id": "1234567890", + "credentials": TEST_CREDENTIALS_INPUT, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("success", True), + ], + test_mock={"retweet": lambda *args, **kwargs: True}, + ) + + @staticmethod + def retweet( + credentials: TwitterCredentials, + tweet_id: str, + ): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + client.retweet( + tweet_id=tweet_id, + user_auth=False, + ) + + return True + + except tweepy.TweepyException: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + success = self.retweet( + credentials, + input_data.tweet_id, + ) + yield "success", success + except Exception as e: + yield "error", handle_tweepy_exception(e) + + 
+class TwitterRemoveRetweetBlock(Block): + """ + Removes a retweet on Twitter + """ + + class Input(BlockSchema): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["tweet.read", "tweet.write", "users.read", "offline.access"] + ) + + tweet_id: str = SchemaField( + description="ID of the tweet to remove retweet", + placeholder="Enter tweet ID", + ) + + class Output(BlockSchema): + success: bool = SchemaField( + description="Whether the retweet was successfully removed" + ) + error: str = SchemaField(description="Error message if the removal failed") + + def __init__(self): + super().__init__( + id="b6e663f0-a630-11ef-a7f0-8b9b0c542ff8", + description="This block removes a retweet on Twitter.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterRemoveRetweetBlock.Input, + output_schema=TwitterRemoveRetweetBlock.Output, + test_input={ + "tweet_id": "1234567890", + "credentials": TEST_CREDENTIALS_INPUT, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("success", True), + ], + test_mock={"remove_retweet": lambda *args, **kwargs: True}, + ) + + @staticmethod + def remove_retweet( + credentials: TwitterCredentials, + tweet_id: str, + ): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + client.unretweet( + source_tweet_id=tweet_id, + user_auth=False, + ) + + return True + + except tweepy.TweepyException: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + success = self.remove_retweet( + credentials, + input_data.tweet_id, + ) + yield "success", success + except Exception as e: + yield "error", handle_tweepy_exception(e) + + +class TwitterGetRetweetersBlock(Block): + """ + Gets information about who has retweeted a tweet + """ + + class Input(UserExpansionInputs): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["tweet.read", "users.read", "offline.access"] + ) + + tweet_id: str = SchemaField( + description="ID of the tweet to get retweeters for", + placeholder="Enter tweet ID", + ) + + max_results: int | None = SchemaField( + description="Maximum number of results per page (1-100)", + default=10, + placeholder="Enter max results", + advanced=True, + ) + + pagination_token: str | None = SchemaField( + description="Token for pagination", + placeholder="Enter pagination token", + default="", + ) + + class Output(BlockSchema): + # Common Outputs that user commonly uses + ids: list = SchemaField(description="List of user ids who retweeted") + names: list = SchemaField(description="List of user names who retweeted") + usernames: list = SchemaField( + description="List of user usernames who retweeted" + ) + next_token: str = SchemaField(description="Token for next page of results") + + # Complete Outputs for advanced use + data: list[dict] = SchemaField(description="Complete Tweet data") + included: dict = SchemaField( + description="Additional data that you have requested (Optional) via Expansions field" + ) + meta: dict = SchemaField( + description="Provides metadata such as pagination info (next_token) or result counts" + ) + + error: str = SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="ad7aa6fa-a630-11ef-a6b0-e7ca640aa030", + description="This block gets information about who has retweeted a tweet.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterGetRetweetersBlock.Input, + output_schema=TwitterGetRetweetersBlock.Output, + test_input={ + 
"tweet_id": "1234567890", + "credentials": TEST_CREDENTIALS_INPUT, + "max_results": 1, + "pagination_token": "", + "expansions": None, + "media_fields": None, + "place_fields": None, + "poll_fields": None, + "tweet_fields": None, + "user_fields": None, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("ids", ["12345"]), + ("names", ["Test User"]), + ("usernames", ["testuser"]), + ( + "data", + [{"id": "12345", "name": "Test User", "username": "testuser"}], + ), + ], + test_mock={ + "get_retweeters": lambda *args, **kwargs: ( + [{"id": "12345", "name": "Test User", "username": "testuser"}], + {}, + {}, + ["12345"], + ["Test User"], + ["testuser"], + None, + ) + }, + ) + + @staticmethod + def get_retweeters( + credentials: TwitterCredentials, + tweet_id: str, + max_results: int | None, + pagination_token: str | None, + expansions: UserExpansionsFilter | None, + tweet_fields: TweetFieldsFilter | None, + user_fields: TweetUserFieldsFilter | None, + ): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + params = { + "id": tweet_id, + "max_results": max_results, + "pagination_token": ( + None if pagination_token == "" else pagination_token + ), + "user_auth": False, + } + + params = ( + UserExpansionsBuilder(params) + .add_expansions(expansions) + .add_tweet_fields(tweet_fields) + .add_user_fields(user_fields) + .build() + ) + + response = cast(Response, client.get_retweeters(**params)) + + meta = {} + ids = [] + names = [] + usernames = [] + next_token = None + + if response.meta: + meta = response.meta + next_token = meta.get("next_token") + + included = IncludesSerializer.serialize(response.includes) + data = ResponseDataSerializer.serialize_list(response.data) + + if response.data: + ids = [str(user.id) for user in response.data] + names = [user.name for user in response.data] + usernames = [user.username for user in response.data] + return data, included, meta, ids, names, usernames, next_token + + raise Exception("No retweeters found") + + except tweepy.TweepyException: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + data, included, meta, ids, names, usernames, next_token = ( + self.get_retweeters( + credentials, + input_data.tweet_id, + input_data.max_results, + input_data.pagination_token, + input_data.expansions, + input_data.tweet_fields, + input_data.user_fields, + ) + ) + + if ids: + yield "ids", ids + if names: + yield "names", names + if usernames: + yield "usernames", usernames + if next_token: + yield "next_token", next_token + if data: + yield "data", data + if included: + yield "included", included + if meta: + yield "meta", meta + + except Exception as e: + yield "error", handle_tweepy_exception(e) diff --git a/autogpt_platform/backend/backend/blocks/twitter/tweets/timeline.py b/autogpt_platform/backend/backend/blocks/twitter/tweets/timeline.py new file mode 100644 index 0000000000..9bcbcaa347 --- /dev/null +++ b/autogpt_platform/backend/backend/blocks/twitter/tweets/timeline.py @@ -0,0 +1,757 @@ +from datetime import datetime +from typing import cast + +import tweepy +from tweepy.client import Response + +from backend.blocks.twitter._auth import ( + TEST_CREDENTIALS, + TEST_CREDENTIALS_INPUT, + TwitterCredentials, + TwitterCredentialsField, + TwitterCredentialsInput, +) +from backend.blocks.twitter._builders import ( + TweetDurationBuilder, + TweetExpansionsBuilder, +) +from backend.blocks.twitter._serializer import ( + 
IncludesSerializer, + ResponseDataSerializer, +) +from backend.blocks.twitter._types import ( + ExpansionFilter, + TweetExpansionInputs, + TweetFieldsFilter, + TweetMediaFieldsFilter, + TweetPlaceFieldsFilter, + TweetPollFieldsFilter, + TweetTimeWindowInputs, + TweetUserFieldsFilter, +) +from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception +from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema +from backend.data.model import SchemaField + + +class TwitterGetUserMentionsBlock(Block): + """ + Returns Tweets where a single user is mentioned, just put that user id + """ + + class Input(TweetExpansionInputs, TweetTimeWindowInputs): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["tweet.read", "users.read", "offline.access"] + ) + + user_id: str = SchemaField( + description="Unique identifier of the user for whom to return Tweets mentioning the user", + placeholder="Enter user ID", + ) + + max_results: int | None = SchemaField( + description="Number of tweets to retrieve (5-100)", + default=10, + advanced=True, + ) + + pagination_token: str | None = SchemaField( + description="Token for pagination", default="", advanced=True + ) + + class Output(BlockSchema): + # Common Outputs that user commonly uses + ids: list[str] = SchemaField(description="List of Tweet IDs") + texts: list[str] = SchemaField(description="All Tweet texts") + + userIds: list[str] = SchemaField( + description="List of user ids that mentioned the user" + ) + userNames: list[str] = SchemaField( + description="List of user names that mentioned the user" + ) + next_token: str = SchemaField(description="Next token for pagination") + + # Complete Outputs for advanced use + data: list[dict] = SchemaField(description="Complete Tweet data") + included: dict = SchemaField( + description="Additional data that you have requested (Optional) via Expansions field" + ) + meta: dict = SchemaField( + description="Provides metadata such as pagination info (next_token) or result counts" + ) + + # error + error: str = SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="e01c890c-a630-11ef-9e20-37da24888bd0", + description="This block retrieves Tweets mentioning a specific user.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterGetUserMentionsBlock.Input, + output_schema=TwitterGetUserMentionsBlock.Output, + test_input={ + "user_id": "12345", + "credentials": TEST_CREDENTIALS_INPUT, + "max_results": 2, + "start_time": "2024-12-14T18:30:00.000Z", + "end_time": "2024-12-17T18:30:00.000Z", + "since_id": "", + "until_id": "", + "sort_order": None, + "pagination_token": None, + "expansions": None, + "media_fields": None, + "place_fields": None, + "poll_fields": None, + "tweet_fields": None, + "user_fields": None, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("ids", ["1373001119480344583", "1372627771717869568"]), + ("texts", ["Test mention 1", "Test mention 2"]), + ("userIds", ["67890", "67891"]), + ("userNames", ["testuser1", "testuser2"]), + ( + "data", + [ + {"id": "1373001119480344583", "text": "Test mention 1"}, + {"id": "1372627771717869568", "text": "Test mention 2"}, + ], + ), + ], + test_mock={ + "get_mentions": lambda *args, **kwargs: ( + ["1373001119480344583", "1372627771717869568"], + ["Test mention 1", "Test mention 2"], + ["67890", "67891"], + ["testuser1", "testuser2"], + [ + {"id": "1373001119480344583", "text": "Test mention 1"}, + {"id": "1372627771717869568", "text": "Test mention 
2"}, + ], + {}, + {}, + None, + ) + }, + ) + + @staticmethod + def get_mentions( + credentials: TwitterCredentials, + user_id: str, + max_results: int | None, + start_time: datetime | None, + end_time: datetime | None, + since_id: str | None, + until_id: str | None, + sort_order: str | None, + pagination: str | None, + expansions: ExpansionFilter | None, + media_fields: TweetMediaFieldsFilter | None, + place_fields: TweetPlaceFieldsFilter | None, + poll_fields: TweetPollFieldsFilter | None, + tweet_fields: TweetFieldsFilter | None, + user_fields: TweetUserFieldsFilter | None, + ): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + params = { + "id": user_id, + "max_results": max_results, + "pagination_token": None if pagination == "" else pagination, + "user_auth": False, + } + + # Adding expansions to params If required by the user + params = ( + TweetExpansionsBuilder(params) + .add_expansions(expansions) + .add_media_fields(media_fields) + .add_place_fields(place_fields) + .add_poll_fields(poll_fields) + .add_tweet_fields(tweet_fields) + .add_user_fields(user_fields) + .build() + ) + + # Adding time window to params If required by the user + params = ( + TweetDurationBuilder(params) + .add_start_time(start_time) + .add_end_time(end_time) + .add_since_id(since_id) + .add_until_id(until_id) + .add_sort_order(sort_order) + .build() + ) + + response = cast( + Response, + client.get_users_mentions(**params), + ) + + if not response.data and not response.meta: + raise Exception("No tweets found") + + included = IncludesSerializer.serialize(response.includes) + data = ResponseDataSerializer.serialize_list(response.data) + meta = response.meta or {} + next_token = meta.get("next_token", "") + + tweet_ids = [] + tweet_texts = [] + user_ids = [] + user_names = [] + + if response.data: + tweet_ids = [str(tweet.id) for tweet in response.data] + tweet_texts = [tweet.text for tweet in response.data] + + if "users" in included: + user_ids = [str(user["id"]) for user in included["users"]] + user_names = [user["username"] for user in included["users"]] + + return ( + tweet_ids, + tweet_texts, + user_ids, + user_names, + data, + included, + meta, + next_token, + ) + + except tweepy.TweepyException: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + ids, texts, user_ids, user_names, data, included, meta, next_token = ( + self.get_mentions( + credentials, + input_data.user_id, + input_data.max_results, + input_data.start_time, + input_data.end_time, + input_data.since_id, + input_data.until_id, + input_data.sort_order, + input_data.pagination_token, + input_data.expansions, + input_data.media_fields, + input_data.place_fields, + input_data.poll_fields, + input_data.tweet_fields, + input_data.user_fields, + ) + ) + if ids: + yield "ids", ids + if texts: + yield "texts", texts + if user_ids: + yield "userIds", user_ids + if user_names: + yield "userNames", user_names + if next_token: + yield "next_token", next_token + if data: + yield "data", data + if included: + yield "included", included + if meta: + yield "meta", meta + + except Exception as e: + yield "error", handle_tweepy_exception(e) + + +class TwitterGetHomeTimelineBlock(Block): + """ + Returns a collection of the most recent Tweets and Retweets posted by you and users you follow + """ + + class Input(TweetExpansionInputs, TweetTimeWindowInputs): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + 
["tweet.read", "users.read", "offline.access"] + ) + + max_results: int | None = SchemaField( + description="Number of tweets to retrieve (5-100)", + default=10, + advanced=True, + ) + + pagination_token: str | None = SchemaField( + description="Token for pagination", default="", advanced=True + ) + + class Output(BlockSchema): + # Common Outputs that user commonly uses + ids: list[str] = SchemaField(description="List of Tweet IDs") + texts: list[str] = SchemaField(description="All Tweet texts") + + userIds: list[str] = SchemaField( + description="List of user ids that authored the tweets" + ) + userNames: list[str] = SchemaField( + description="List of user names that authored the tweets" + ) + next_token: str = SchemaField(description="Next token for pagination") + + # Complete Outputs for advanced use + data: list[dict] = SchemaField(description="Complete Tweet data") + included: dict = SchemaField( + description="Additional data that you have requested (Optional) via Expansions field" + ) + meta: dict = SchemaField( + description="Provides metadata such as pagination info (next_token) or result counts" + ) + + # error + error: str = SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="d222a070-a630-11ef-a18a-3f52f76c6962", + description="This block retrieves the authenticated user's home timeline.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterGetHomeTimelineBlock.Input, + output_schema=TwitterGetHomeTimelineBlock.Output, + test_input={ + "credentials": TEST_CREDENTIALS_INPUT, + "max_results": 2, + "start_time": "2024-12-14T18:30:00.000Z", + "end_time": "2024-12-17T18:30:00.000Z", + "since_id": None, + "until_id": None, + "sort_order": None, + "pagination_token": None, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("ids", ["1373001119480344583", "1372627771717869568"]), + ("texts", ["Test tweet 1", "Test tweet 2"]), + ("userIds", ["67890", "67891"]), + ("userNames", ["testuser1", "testuser2"]), + ( + "data", + [ + {"id": "1373001119480344583", "text": "Test tweet 1"}, + {"id": "1372627771717869568", "text": "Test tweet 2"}, + ], + ), + ], + test_mock={ + "get_timeline": lambda *args, **kwargs: ( + ["1373001119480344583", "1372627771717869568"], + ["Test tweet 1", "Test tweet 2"], + ["67890", "67891"], + ["testuser1", "testuser2"], + [ + {"id": "1373001119480344583", "text": "Test tweet 1"}, + {"id": "1372627771717869568", "text": "Test tweet 2"}, + ], + {}, + {}, + None, + ) + }, + ) + + @staticmethod + def get_timeline( + credentials: TwitterCredentials, + max_results: int | None, + start_time: datetime | None, + end_time: datetime | None, + since_id: str | None, + until_id: str | None, + sort_order: str | None, + pagination: str | None, + expansions: ExpansionFilter | None, + media_fields: TweetMediaFieldsFilter | None, + place_fields: TweetPlaceFieldsFilter | None, + poll_fields: TweetPollFieldsFilter | None, + tweet_fields: TweetFieldsFilter | None, + user_fields: TweetUserFieldsFilter | None, + ): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + params = { + "max_results": max_results, + "pagination_token": None if pagination == "" else pagination, + "user_auth": False, + } + + # Adding expansions to params If required by the user + params = ( + TweetExpansionsBuilder(params) + .add_expansions(expansions) + .add_media_fields(media_fields) + .add_place_fields(place_fields) + .add_poll_fields(poll_fields) + .add_tweet_fields(tweet_fields) + 
.add_user_fields(user_fields) + .build() + ) + + # Adding time window to params If required by the user + params = ( + TweetDurationBuilder(params) + .add_start_time(start_time) + .add_end_time(end_time) + .add_since_id(since_id) + .add_until_id(until_id) + .add_sort_order(sort_order) + .build() + ) + + response = cast( + Response, + client.get_home_timeline(**params), + ) + + if not response.data and not response.meta: + raise Exception("No tweets found") + + included = IncludesSerializer.serialize(response.includes) + data = ResponseDataSerializer.serialize_list(response.data) + meta = response.meta or {} + next_token = meta.get("next_token", "") + + tweet_ids = [] + tweet_texts = [] + user_ids = [] + user_names = [] + + if response.data: + tweet_ids = [str(tweet.id) for tweet in response.data] + tweet_texts = [tweet.text for tweet in response.data] + + if "users" in included: + user_ids = [str(user["id"]) for user in included["users"]] + user_names = [user["username"] for user in included["users"]] + + return ( + tweet_ids, + tweet_texts, + user_ids, + user_names, + data, + included, + meta, + next_token, + ) + + except tweepy.TweepyException: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + ids, texts, user_ids, user_names, data, included, meta, next_token = ( + self.get_timeline( + credentials, + input_data.max_results, + input_data.start_time, + input_data.end_time, + input_data.since_id, + input_data.until_id, + input_data.sort_order, + input_data.pagination_token, + input_data.expansions, + input_data.media_fields, + input_data.place_fields, + input_data.poll_fields, + input_data.tweet_fields, + input_data.user_fields, + ) + ) + if ids: + yield "ids", ids + if texts: + yield "texts", texts + if user_ids: + yield "userIds", user_ids + if user_names: + yield "userNames", user_names + if next_token: + yield "next_token", next_token + if data: + yield "data", data + if included: + yield "included", included + if meta: + yield "meta", meta + + except Exception as e: + yield "error", handle_tweepy_exception(e) + + +class TwitterGetUserTweetsBlock(Block): + """ + Returns Tweets composed by a single user, specified by the requested user ID + """ + + class Input(TweetExpansionInputs, TweetTimeWindowInputs): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["tweet.read", "users.read", "offline.access"] + ) + + user_id: str = SchemaField( + description="Unique identifier of the Twitter account (user ID) for whom to return results", + placeholder="Enter user ID", + ) + + max_results: int | None = SchemaField( + description="Number of tweets to retrieve (5-100)", + default=10, + advanced=True, + ) + + pagination_token: str | None = SchemaField( + description="Token for pagination", default="", advanced=True + ) + + class Output(BlockSchema): + # Common Outputs that user commonly uses + ids: list[str] = SchemaField(description="List of Tweet IDs") + texts: list[str] = SchemaField(description="All Tweet texts") + + userIds: list[str] = SchemaField( + description="List of user ids that authored the tweets" + ) + userNames: list[str] = SchemaField( + description="List of user names that authored the tweets" + ) + next_token: str = SchemaField(description="Next token for pagination") + + # Complete Outputs for advanced use + data: list[dict] = SchemaField(description="Complete Tweet data") + included: dict = SchemaField( + description="Additional data that you have requested (Optional) via Expansions 
field" + ) + meta: dict = SchemaField( + description="Provides metadata such as pagination info (next_token) or result counts" + ) + + # error + error: str = SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="c44c3ef2-a630-11ef-9ff7-eb7b5ea3a5cb", + description="This block retrieves Tweets composed by a single user.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterGetUserTweetsBlock.Input, + output_schema=TwitterGetUserTweetsBlock.Output, + test_input={ + "user_id": "12345", + "credentials": TEST_CREDENTIALS_INPUT, + "max_results": 2, + "start_time": "2024-12-14T18:30:00.000Z", + "end_time": "2024-12-17T18:30:00.000Z", + "since_id": None, + "until_id": None, + "sort_order": None, + "pagination_token": None, + "expansions": None, + "media_fields": None, + "place_fields": None, + "poll_fields": None, + "tweet_fields": None, + "user_fields": None, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("ids", ["1373001119480344583", "1372627771717869568"]), + ("texts", ["Test tweet 1", "Test tweet 2"]), + ("userIds", ["67890", "67891"]), + ("userNames", ["testuser1", "testuser2"]), + ( + "data", + [ + {"id": "1373001119480344583", "text": "Test tweet 1"}, + {"id": "1372627771717869568", "text": "Test tweet 2"}, + ], + ), + ], + test_mock={ + "get_user_tweets": lambda *args, **kwargs: ( + ["1373001119480344583", "1372627771717869568"], + ["Test tweet 1", "Test tweet 2"], + ["67890", "67891"], + ["testuser1", "testuser2"], + [ + {"id": "1373001119480344583", "text": "Test tweet 1"}, + {"id": "1372627771717869568", "text": "Test tweet 2"}, + ], + {}, + {}, + None, + ) + }, + ) + + @staticmethod + def get_user_tweets( + credentials: TwitterCredentials, + user_id: str, + max_results: int | None, + start_time: datetime | None, + end_time: datetime | None, + since_id: str | None, + until_id: str | None, + sort_order: str | None, + pagination: str | None, + expansions: ExpansionFilter | None, + media_fields: TweetMediaFieldsFilter | None, + place_fields: TweetPlaceFieldsFilter | None, + poll_fields: TweetPollFieldsFilter | None, + tweet_fields: TweetFieldsFilter | None, + user_fields: TweetUserFieldsFilter | None, + ): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + params = { + "id": user_id, + "max_results": max_results, + "pagination_token": None if pagination == "" else pagination, + "user_auth": False, + } + + # Adding expansions to params If required by the user + params = ( + TweetExpansionsBuilder(params) + .add_expansions(expansions) + .add_media_fields(media_fields) + .add_place_fields(place_fields) + .add_poll_fields(poll_fields) + .add_tweet_fields(tweet_fields) + .add_user_fields(user_fields) + .build() + ) + + # Adding time window to params If required by the user + params = ( + TweetDurationBuilder(params) + .add_start_time(start_time) + .add_end_time(end_time) + .add_since_id(since_id) + .add_until_id(until_id) + .add_sort_order(sort_order) + .build() + ) + + response = cast( + Response, + client.get_users_tweets(**params), + ) + + if not response.data and not response.meta: + raise Exception("No tweets found") + + included = IncludesSerializer.serialize(response.includes) + data = ResponseDataSerializer.serialize_list(response.data) + meta = response.meta or {} + next_token = meta.get("next_token", "") + + tweet_ids = [] + tweet_texts = [] + user_ids = [] + user_names = [] + + if response.data: + tweet_ids = [str(tweet.id) for tweet in response.data] + 
tweet_texts = [tweet.text for tweet in response.data] + + if "users" in included: + user_ids = [str(user["id"]) for user in included["users"]] + user_names = [user["username"] for user in included["users"]] + + return ( + tweet_ids, + tweet_texts, + user_ids, + user_names, + data, + included, + meta, + next_token, + ) + + except tweepy.TweepyException: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + ids, texts, user_ids, user_names, data, included, meta, next_token = ( + self.get_user_tweets( + credentials, + input_data.user_id, + input_data.max_results, + input_data.start_time, + input_data.end_time, + input_data.since_id, + input_data.until_id, + input_data.sort_order, + input_data.pagination_token, + input_data.expansions, + input_data.media_fields, + input_data.place_fields, + input_data.poll_fields, + input_data.tweet_fields, + input_data.user_fields, + ) + ) + if ids: + yield "ids", ids + if texts: + yield "texts", texts + if user_ids: + yield "userIds", user_ids + if user_names: + yield "userNames", user_names + if next_token: + yield "next_token", next_token + if data: + yield "data", data + if included: + yield "included", included + if meta: + yield "meta", meta + + except Exception as e: + yield "error", handle_tweepy_exception(e) diff --git a/autogpt_platform/backend/backend/blocks/twitter/tweets/tweet_lookup.py b/autogpt_platform/backend/backend/blocks/twitter/tweets/tweet_lookup.py new file mode 100644 index 0000000000..54d4f5b43e --- /dev/null +++ b/autogpt_platform/backend/backend/blocks/twitter/tweets/tweet_lookup.py @@ -0,0 +1,361 @@ +from typing import cast + +import tweepy +from tweepy.client import Response + +from backend.blocks.twitter._auth import ( + TEST_CREDENTIALS, + TEST_CREDENTIALS_INPUT, + TwitterCredentials, + TwitterCredentialsField, + TwitterCredentialsInput, +) +from backend.blocks.twitter._builders import TweetExpansionsBuilder +from backend.blocks.twitter._serializer import ( + IncludesSerializer, + ResponseDataSerializer, +) +from backend.blocks.twitter._types import ( + ExpansionFilter, + TweetExpansionInputs, + TweetFieldsFilter, + TweetMediaFieldsFilter, + TweetPlaceFieldsFilter, + TweetPollFieldsFilter, + TweetUserFieldsFilter, +) +from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception +from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema +from backend.data.model import SchemaField + + +class TwitterGetTweetBlock(Block): + """ + Returns information about a single Tweet specified by the requested ID + """ + + class Input(TweetExpansionInputs): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["tweet.read", "users.read", "offline.access"] + ) + + tweet_id: str = SchemaField( + description="Unique identifier of the Tweet to request (ex: 1460323737035677698)", + placeholder="Enter tweet ID", + ) + + class Output(BlockSchema): + # Common Outputs that user commonly uses + id: str = SchemaField(description="Tweet ID") + text: str = SchemaField(description="Tweet text") + userId: str = SchemaField(description="ID of the tweet author") + userName: str = SchemaField(description="Username of the tweet author") + + # Complete Outputs for advanced use + data: dict = SchemaField(description="Tweet data") + included: dict = SchemaField( + description="Additional data that you have requested (Optional) via Expansions field" + ) + meta: dict = SchemaField(description="Metadata about the tweet") + + error: str = 
SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="f5155c3a-a630-11ef-9cc1-a309988b4d92", + description="This block retrieves information about a specific Tweet.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterGetTweetBlock.Input, + output_schema=TwitterGetTweetBlock.Output, + test_input={ + "tweet_id": "1460323737035677698", + "credentials": TEST_CREDENTIALS_INPUT, + "expansions": None, + "media_fields": None, + "place_fields": None, + "poll_fields": None, + "tweet_fields": None, + "user_fields": None, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("id", "1460323737035677698"), + ("text", "Test tweet content"), + ("userId", "12345"), + ("userName", "testuser"), + ("data", {"id": "1460323737035677698", "text": "Test tweet content"}), + ("included", {"users": [{"id": "12345", "username": "testuser"}]}), + ("meta", {"result_count": 1}), + ], + test_mock={ + "get_tweet": lambda *args, **kwargs: ( + {"id": "1460323737035677698", "text": "Test tweet content"}, + {"users": [{"id": "12345", "username": "testuser"}]}, + {"result_count": 1}, + "12345", + "testuser", + ) + }, + ) + + @staticmethod + def get_tweet( + credentials: TwitterCredentials, + tweet_id: str, + expansions: ExpansionFilter | None, + media_fields: TweetMediaFieldsFilter | None, + place_fields: TweetPlaceFieldsFilter | None, + poll_fields: TweetPollFieldsFilter | None, + tweet_fields: TweetFieldsFilter | None, + user_fields: TweetUserFieldsFilter | None, + ): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + params = {"id": tweet_id, "user_auth": False} + + # Adding expansions to params If required by the user + params = ( + TweetExpansionsBuilder(params) + .add_expansions(expansions) + .add_media_fields(media_fields) + .add_place_fields(place_fields) + .add_poll_fields(poll_fields) + .add_tweet_fields(tweet_fields) + .add_user_fields(user_fields) + .build() + ) + + response = cast(Response, client.get_tweet(**params)) + + meta = {} + user_id = "" + user_name = "" + + if response.meta: + meta = response.meta + + included = IncludesSerializer.serialize(response.includes) + data = ResponseDataSerializer.serialize_dict(response.data) + + if included and "users" in included: + user_id = str(included["users"][0]["id"]) + user_name = included["users"][0]["username"] + + if response.data: + return data, included, meta, user_id, user_name + + raise Exception("Tweet not found") + + except tweepy.TweepyException: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + + tweet_data, included, meta, user_id, user_name = self.get_tweet( + credentials, + input_data.tweet_id, + input_data.expansions, + input_data.media_fields, + input_data.place_fields, + input_data.poll_fields, + input_data.tweet_fields, + input_data.user_fields, + ) + + yield "id", str(tweet_data["id"]) + yield "text", tweet_data["text"] + if user_id: + yield "userId", user_id + if user_name: + yield "userName", user_name + yield "data", tweet_data + if included: + yield "included", included + if meta: + yield "meta", meta + + except Exception as e: + yield "error", handle_tweepy_exception(e) + + +class TwitterGetTweetsBlock(Block): + """ + Returns information about multiple Tweets specified by the requested IDs + """ + + class Input(TweetExpansionInputs): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["tweet.read", "users.read", 
"offline.access"] + ) + + tweet_ids: list[str] = SchemaField( + description="List of Tweet IDs to request (up to 100)", + placeholder="Enter tweet IDs", + ) + + class Output(BlockSchema): + # Common Outputs that user commonly uses + ids: list[str] = SchemaField(description="All Tweet IDs") + texts: list[str] = SchemaField(description="All Tweet texts") + userIds: list[str] = SchemaField( + description="List of user ids that authored the tweets" + ) + userNames: list[str] = SchemaField( + description="List of user names that authored the tweets" + ) + + # Complete Outputs for advanced use + data: list[dict] = SchemaField(description="Complete Tweet data") + included: dict = SchemaField( + description="Additional data that you have requested (Optional) via Expansions field" + ) + meta: dict = SchemaField(description="Metadata about the tweets") + + error: str = SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="e7cc5420-a630-11ef-bfaf-13bdd8096a51", + description="This block retrieves information about multiple Tweets.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterGetTweetsBlock.Input, + output_schema=TwitterGetTweetsBlock.Output, + test_input={ + "tweet_ids": ["1460323737035677698"], + "credentials": TEST_CREDENTIALS_INPUT, + "expansions": None, + "media_fields": None, + "place_fields": None, + "poll_fields": None, + "tweet_fields": None, + "user_fields": None, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("ids", ["1460323737035677698"]), + ("texts", ["Test tweet content"]), + ("userIds", ["67890"]), + ("userNames", ["testuser1"]), + ("data", [{"id": "1460323737035677698", "text": "Test tweet content"}]), + ("included", {"users": [{"id": "67890", "username": "testuser1"}]}), + ("meta", {"result_count": 1}), + ], + test_mock={ + "get_tweets": lambda *args, **kwargs: ( + ["1460323737035677698"], # ids + ["Test tweet content"], # texts + ["67890"], # user_ids + ["testuser1"], # user_names + [ + {"id": "1460323737035677698", "text": "Test tweet content"} + ], # data + {"users": [{"id": "67890", "username": "testuser1"}]}, # included + {"result_count": 1}, # meta + ) + }, + ) + + @staticmethod + def get_tweets( + credentials: TwitterCredentials, + tweet_ids: list[str], + expansions: ExpansionFilter | None, + media_fields: TweetMediaFieldsFilter | None, + place_fields: TweetPlaceFieldsFilter | None, + poll_fields: TweetPollFieldsFilter | None, + tweet_fields: TweetFieldsFilter | None, + user_fields: TweetUserFieldsFilter | None, + ): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + params = {"ids": tweet_ids, "user_auth": False} + + # Adding expansions to params If required by the user + params = ( + TweetExpansionsBuilder(params) + .add_expansions(expansions) + .add_media_fields(media_fields) + .add_place_fields(place_fields) + .add_poll_fields(poll_fields) + .add_tweet_fields(tweet_fields) + .add_user_fields(user_fields) + .build() + ) + + response = cast(Response, client.get_tweets(**params)) + + if not response.data and not response.meta: + raise Exception("No tweets found") + + tweet_ids = [] + tweet_texts = [] + user_ids = [] + user_names = [] + meta = {} + + included = IncludesSerializer.serialize(response.includes) + data = ResponseDataSerializer.serialize_list(response.data) + + if response.data: + tweet_ids = [str(tweet.id) for tweet in response.data] + tweet_texts = [tweet.text for tweet in response.data] + + if included and "users" in included: + 
for user in included["users"]: + user_ids.append(str(user["id"])) + user_names.append(user["username"]) + + if response.meta: + meta = response.meta + + return tweet_ids, tweet_texts, user_ids, user_names, data, included, meta + + except tweepy.TweepyException: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + ids, texts, user_ids, user_names, data, included, meta = self.get_tweets( + credentials, + input_data.tweet_ids, + input_data.expansions, + input_data.media_fields, + input_data.place_fields, + input_data.poll_fields, + input_data.tweet_fields, + input_data.user_fields, + ) + if ids: + yield "ids", ids + if texts: + yield "texts", texts + if user_ids: + yield "userIds", user_ids + if user_names: + yield "userNames", user_names + if data: + yield "data", data + if included: + yield "included", included + if meta: + yield "meta", meta + + except Exception as e: + yield "error", handle_tweepy_exception(e) diff --git a/autogpt_platform/backend/backend/blocks/twitter/users/blocks.py b/autogpt_platform/backend/backend/blocks/twitter/users/blocks.py new file mode 100644 index 0000000000..ebe45038e8 --- /dev/null +++ b/autogpt_platform/backend/backend/blocks/twitter/users/blocks.py @@ -0,0 +1,305 @@ +from typing import cast + +import tweepy +from tweepy.client import Response + +from backend.blocks.twitter._auth import ( + TEST_CREDENTIALS, + TEST_CREDENTIALS_INPUT, + TwitterCredentials, + TwitterCredentialsField, + TwitterCredentialsInput, +) +from backend.blocks.twitter._builders import UserExpansionsBuilder +from backend.blocks.twitter._serializer import IncludesSerializer +from backend.blocks.twitter._types import ( + TweetFieldsFilter, + TweetUserFieldsFilter, + UserExpansionInputs, + UserExpansionsFilter, +) +from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception +from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema +from backend.data.model import SchemaField + + +class TwitterUnblockUserBlock(Block): + """ + Unblock a specific user on Twitter. The request succeeds with no action when the user sends a request to a user they're not blocking or have already unblocked. 
+ """ + + class Input(BlockSchema): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["block.write", "users.read", "offline.access"] + ) + + target_user_id: str = SchemaField( + description="The user ID of the user that you would like to unblock", + placeholder="Enter target user ID", + ) + + class Output(BlockSchema): + success: bool = SchemaField(description="Whether the unblock was successful") + error: str = SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="0f1b6570-a631-11ef-a3ea-230cbe9650dd", + description="This block unblocks a specific user on Twitter.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterUnblockUserBlock.Input, + output_schema=TwitterUnblockUserBlock.Output, + test_input={ + "target_user_id": "12345", + "credentials": TEST_CREDENTIALS_INPUT, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("success", True), + ], + test_mock={"unblock_user": lambda *args, **kwargs: True}, + ) + + @staticmethod + def unblock_user(credentials: TwitterCredentials, target_user_id: str): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + client.unblock(target_user_id=target_user_id, user_auth=False) + + return True + + except tweepy.TweepyException: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + success = self.unblock_user(credentials, input_data.target_user_id) + yield "success", success + except Exception as e: + yield "error", handle_tweepy_exception(e) + + +class TwitterGetBlockedUsersBlock(Block): + """ + Get a list of users who are blocked by the authenticating user + """ + + class Input(UserExpansionInputs): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["users.read", "offline.access", "block.read"] + ) + + max_results: int | None = SchemaField( + description="Maximum number of results to return (1-1000, default 100)", + placeholder="Enter max results", + default=10, + advanced=True, + ) + + pagination_token: str | None = SchemaField( + description="Token for retrieving next/previous page of results", + placeholder="Enter pagination token", + default="", + advanced=True, + ) + + class Output(BlockSchema): + user_ids: list[str] = SchemaField(description="List of blocked user IDs") + usernames_: list[str] = SchemaField(description="List of blocked usernames") + included: dict = SchemaField( + description="Additional data requested via expansions" + ) + meta: dict = SchemaField(description="Metadata including pagination info") + next_token: str = SchemaField(description="Next token for pagination") + error: str = SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="05f409e8-a631-11ef-ae89-93de863ee30d", + description="This block retrieves a list of users blocked by the authenticating user.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterGetBlockedUsersBlock.Input, + output_schema=TwitterGetBlockedUsersBlock.Output, + test_input={ + "max_results": 10, + "pagination_token": "", + "credentials": TEST_CREDENTIALS_INPUT, + "expansions": None, + "tweet_fields": None, + "user_fields": None, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("user_ids", ["12345", "67890"]), + ("usernames_", ["testuser1", "testuser2"]), + ], + test_mock={ + "get_blocked_users": lambda *args, **kwargs: ( + {}, # included + {}, # meta + ["12345", "67890"], # user_ids + 
["testuser1", "testuser2"], # usernames + None, # next_token + ) + }, + ) + + @staticmethod + def get_blocked_users( + credentials: TwitterCredentials, + max_results: int | None, + pagination_token: str | None, + expansions: UserExpansionsFilter | None, + tweet_fields: TweetFieldsFilter | None, + user_fields: TweetUserFieldsFilter | None, + ): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + params = { + "max_results": max_results, + "pagination_token": ( + None if pagination_token == "" else pagination_token + ), + "user_auth": False, + } + + params = ( + UserExpansionsBuilder(params) + .add_expansions(expansions) + .add_tweet_fields(tweet_fields) + .add_user_fields(user_fields) + .build() + ) + + response = cast(Response, client.get_blocked(**params)) + + meta = {} + user_ids = [] + usernames = [] + next_token = None + + included = IncludesSerializer.serialize(response.includes) + + if response.data: + for user in response.data: + user_ids.append(str(user.id)) + usernames.append(user.username) + + if response.meta: + meta = response.meta + if "next_token" in meta: + next_token = meta["next_token"] + + if user_ids and usernames: + return included, meta, user_ids, usernames, next_token + else: + raise tweepy.TweepyException("No blocked users found") + + except tweepy.TweepyException: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + included, meta, user_ids, usernames, next_token = self.get_blocked_users( + credentials, + input_data.max_results, + input_data.pagination_token, + input_data.expansions, + input_data.tweet_fields, + input_data.user_fields, + ) + if user_ids: + yield "user_ids", user_ids + if usernames: + yield "usernames_", usernames + if included: + yield "included", included + if meta: + yield "meta", meta + if next_token: + yield "next_token", next_token + except Exception as e: + yield "error", handle_tweepy_exception(e) + + +class TwitterBlockUserBlock(Block): + """ + Block a specific user on Twitter + """ + + class Input(BlockSchema): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["block.write", "users.read", "offline.access"] + ) + + target_user_id: str = SchemaField( + description="The user ID of the user that you would like to block", + placeholder="Enter target user ID", + ) + + class Output(BlockSchema): + success: bool = SchemaField(description="Whether the block was successful") + error: str = SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="fc258b94-a630-11ef-abc3-df050b75b816", + description="This block blocks a specific user on Twitter.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterBlockUserBlock.Input, + output_schema=TwitterBlockUserBlock.Output, + test_input={ + "target_user_id": "12345", + "credentials": TEST_CREDENTIALS_INPUT, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("success", True), + ], + test_mock={"block_user": lambda *args, **kwargs: True}, + ) + + @staticmethod + def block_user(credentials: TwitterCredentials, target_user_id: str): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + client.block(target_user_id=target_user_id, user_auth=False) + + return True + + except tweepy.TweepyException: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + success = 
self.block_user(credentials, input_data.target_user_id) + yield "success", success + except Exception as e: + yield "error", handle_tweepy_exception(e) diff --git a/autogpt_platform/backend/backend/blocks/twitter/users/follows.py b/autogpt_platform/backend/backend/blocks/twitter/users/follows.py new file mode 100644 index 0000000000..3ffafa6062 --- /dev/null +++ b/autogpt_platform/backend/backend/blocks/twitter/users/follows.py @@ -0,0 +1,510 @@ +from typing import cast + +import tweepy +from tweepy.client import Response + +from backend.blocks.twitter._auth import ( + TEST_CREDENTIALS, + TEST_CREDENTIALS_INPUT, + TwitterCredentials, + TwitterCredentialsField, + TwitterCredentialsInput, +) +from backend.blocks.twitter._builders import UserExpansionsBuilder +from backend.blocks.twitter._serializer import ( + IncludesSerializer, + ResponseDataSerializer, +) +from backend.blocks.twitter._types import ( + TweetFieldsFilter, + TweetUserFieldsFilter, + UserExpansionInputs, + UserExpansionsFilter, +) +from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception +from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema +from backend.data.model import SchemaField + + +class TwitterUnfollowUserBlock(Block): + """ + Allows a user to unfollow another user specified by target user ID. + The request succeeds with no action when the authenticated user sends a request to a user they're not following or have already unfollowed. + """ + + class Input(BlockSchema): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["users.read", "users.write", "follows.write", "offline.access"] + ) + + target_user_id: str = SchemaField( + description="The user ID of the user that you would like to unfollow", + placeholder="Enter target user ID", + ) + + class Output(BlockSchema): + success: bool = SchemaField( + description="Whether the unfollow action was successful" + ) + error: str = SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="37e386a4-a631-11ef-b7bd-b78204b35fa4", + description="This block unfollows a specified Twitter user.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterUnfollowUserBlock.Input, + output_schema=TwitterUnfollowUserBlock.Output, + test_input={ + "target_user_id": "12345", + "credentials": TEST_CREDENTIALS_INPUT, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("success", True), + ], + test_mock={"unfollow_user": lambda *args, **kwargs: True}, + ) + + @staticmethod + def unfollow_user(credentials: TwitterCredentials, target_user_id: str): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + client.unfollow_user(target_user_id=target_user_id, user_auth=False) + + return True + + except tweepy.TweepyException: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + success = self.unfollow_user(credentials, input_data.target_user_id) + yield "success", success + + except Exception as e: + yield "error", handle_tweepy_exception(e) + + +class TwitterFollowUserBlock(Block): + """ + Allows a user to follow another user specified by target user ID. If the target user does not have public Tweets, + this endpoint will send a follow request. The request succeeds with no action when the authenticated user sends a + request to a user they're already following, or if they're sending a follower request to a user that does not have + public Tweets. 
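+    Requires the "users.write", "follows.write" and "offline.access" scopes (see the credentials input below).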
+ """ + + class Input(BlockSchema): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["users.read", "users.write", "follows.write", "offline.access"] + ) + + target_user_id: str = SchemaField( + description="The user ID of the user that you would like to follow", + placeholder="Enter target user ID", + ) + + class Output(BlockSchema): + success: bool = SchemaField( + description="Whether the follow action was successful" + ) + error: str = SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="1aae6a5e-a631-11ef-a090-435900c6d429", + description="This block follows a specified Twitter user.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterFollowUserBlock.Input, + output_schema=TwitterFollowUserBlock.Output, + test_input={ + "target_user_id": "12345", + "credentials": TEST_CREDENTIALS_INPUT, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[("success", True)], + test_mock={"follow_user": lambda *args, **kwargs: True}, + ) + + @staticmethod + def follow_user(credentials: TwitterCredentials, target_user_id: str): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + client.follow_user(target_user_id=target_user_id, user_auth=False) + + return True + + except tweepy.TweepyException: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + success = self.follow_user(credentials, input_data.target_user_id) + yield "success", success + + except Exception as e: + yield "error", handle_tweepy_exception(e) + + +class TwitterGetFollowersBlock(Block): + """ + Retrieves a list of followers for a specified Twitter user ID + """ + + class Input(UserExpansionInputs): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["users.read", "offline.access", "follows.read"] + ) + + target_user_id: str = SchemaField( + description="The user ID whose followers you would like to retrieve", + placeholder="Enter target user ID", + ) + + max_results: int | None = SchemaField( + description="Maximum number of results to return (1-1000, default 100)", + placeholder="Enter max results", + default=10, + advanced=True, + ) + + pagination_token: str | None = SchemaField( + description="Token for retrieving next/previous page of results", + placeholder="Enter pagination token", + default="", + advanced=True, + ) + + class Output(BlockSchema): + ids: list[str] = SchemaField(description="List of follower user IDs") + usernames: list[str] = SchemaField(description="List of follower usernames") + next_token: str = SchemaField(description="Next token for pagination") + + data: list[dict] = SchemaField(description="Complete user data for followers") + includes: dict = SchemaField( + description="Additional data requested via expansions" + ) + meta: dict = SchemaField(description="Metadata including pagination info") + + error: str = SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="30f66410-a631-11ef-8fe7-d7f888b4f43c", + description="This block retrieves followers of a specified Twitter user.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterGetFollowersBlock.Input, + output_schema=TwitterGetFollowersBlock.Output, + test_input={ + "target_user_id": "12345", + "max_results": 1, + "pagination_token": "", + "expansions": None, + "tweet_fields": None, + "user_fields": None, + "credentials": TEST_CREDENTIALS_INPUT, + }, + 
test_credentials=TEST_CREDENTIALS, + test_output=[ + ("ids", ["1234567890"]), + ("usernames", ["testuser"]), + ("data", [{"id": "1234567890", "username": "testuser"}]), + ], + test_mock={ + "get_followers": lambda *args, **kwargs: ( + ["1234567890"], + ["testuser"], + [{"id": "1234567890", "username": "testuser"}], + {}, + {}, + None, + ) + }, + ) + + @staticmethod + def get_followers( + credentials: TwitterCredentials, + target_user_id: str, + max_results: int | None, + pagination_token: str | None, + expansions: UserExpansionsFilter | None, + tweet_fields: TweetFieldsFilter | None, + user_fields: TweetUserFieldsFilter | None, + ): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + params = { + "id": target_user_id, + "max_results": max_results, + "pagination_token": ( + None if pagination_token == "" else pagination_token + ), + "user_auth": False, + } + + params = ( + UserExpansionsBuilder(params) + .add_expansions(expansions) + .add_tweet_fields(tweet_fields) + .add_user_fields(user_fields) + .build() + ) + + response = cast(Response, client.get_users_followers(**params)) + + meta = {} + follower_ids = [] + follower_usernames = [] + next_token = None + + if response.meta: + meta = response.meta + next_token = meta.get("next_token") + + included = IncludesSerializer.serialize(response.includes) + data = ResponseDataSerializer.serialize_list(response.data) + + if response.data: + follower_ids = [str(user.id) for user in response.data] + follower_usernames = [user.username for user in response.data] + + return ( + follower_ids, + follower_usernames, + data, + included, + meta, + next_token, + ) + + raise Exception("Followers not found") + + except tweepy.TweepyException: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + ids, usernames, data, includes, meta, next_token = self.get_followers( + credentials, + input_data.target_user_id, + input_data.max_results, + input_data.pagination_token, + input_data.expansions, + input_data.tweet_fields, + input_data.user_fields, + ) + if ids: + yield "ids", ids + if usernames: + yield "usernames", usernames + if next_token: + yield "next_token", next_token + if data: + yield "data", data + if includes: + yield "includes", includes + if meta: + yield "meta", meta + except Exception as e: + yield "error", handle_tweepy_exception(e) + + +class TwitterGetFollowingBlock(Block): + """ + Retrieves a list of users that a specified Twitter user ID is following + """ + + class Input(UserExpansionInputs): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["users.read", "offline.access", "follows.read"] + ) + + target_user_id: str = SchemaField( + description="The user ID whose following you would like to retrieve", + placeholder="Enter target user ID", + ) + + max_results: int | None = SchemaField( + description="Maximum number of results to return (1-1000, default 100)", + placeholder="Enter max results", + default=10, + advanced=True, + ) + + pagination_token: str | None = SchemaField( + description="Token for retrieving next/previous page of results", + placeholder="Enter pagination token", + default="", + advanced=True, + ) + + class Output(BlockSchema): + ids: list[str] = SchemaField(description="List of following user IDs") + usernames: list[str] = SchemaField(description="List of following usernames") + next_token: str = SchemaField(description="Next token for pagination") + + data: list[dict] = 
SchemaField(description="Complete user data for following") + includes: dict = SchemaField( + description="Additional data requested via expansions" + ) + meta: dict = SchemaField(description="Metadata including pagination info") + + error: str = SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="264a399c-a631-11ef-a97d-bfde4ca91173", + description="This block retrieves the users that a specified Twitter user is following.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterGetFollowingBlock.Input, + output_schema=TwitterGetFollowingBlock.Output, + test_input={ + "target_user_id": "12345", + "max_results": 1, + "pagination_token": None, + "expansions": None, + "tweet_fields": None, + "user_fields": None, + "credentials": TEST_CREDENTIALS_INPUT, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("ids", ["1234567890"]), + ("usernames", ["testuser"]), + ("data", [{"id": "1234567890", "username": "testuser"}]), + ], + test_mock={ + "get_following": lambda *args, **kwargs: ( + ["1234567890"], + ["testuser"], + [{"id": "1234567890", "username": "testuser"}], + {}, + {}, + None, + ) + }, + ) + + @staticmethod + def get_following( + credentials: TwitterCredentials, + target_user_id: str, + max_results: int | None, + pagination_token: str | None, + expansions: UserExpansionsFilter | None, + tweet_fields: TweetFieldsFilter | None, + user_fields: TweetUserFieldsFilter | None, + ): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + params = { + "id": target_user_id, + "max_results": max_results, + "pagination_token": ( + None if pagination_token == "" else pagination_token + ), + "user_auth": False, + } + + params = ( + UserExpansionsBuilder(params) + .add_expansions(expansions) + .add_tweet_fields(tweet_fields) + .add_user_fields(user_fields) + .build() + ) + + response = cast(Response, client.get_users_following(**params)) + + meta = {} + following_ids = [] + following_usernames = [] + next_token = None + + if response.meta: + meta = response.meta + next_token = meta.get("next_token") + + included = IncludesSerializer.serialize(response.includes) + data = ResponseDataSerializer.serialize_list(response.data) + + if response.data: + following_ids = [str(user.id) for user in response.data] + following_usernames = [user.username for user in response.data] + + return ( + following_ids, + following_usernames, + data, + included, + meta, + next_token, + ) + + raise Exception("Following not found") + + except tweepy.TweepyException: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + ids, usernames, data, includes, meta, next_token = self.get_following( + credentials, + input_data.target_user_id, + input_data.max_results, + input_data.pagination_token, + input_data.expansions, + input_data.tweet_fields, + input_data.user_fields, + ) + if ids: + yield "ids", ids + if usernames: + yield "usernames", usernames + if next_token: + yield "next_token", next_token + if data: + yield "data", data + if includes: + yield "includes", includes + if meta: + yield "meta", meta + except Exception as e: + yield "error", handle_tweepy_exception(e) diff --git a/autogpt_platform/backend/backend/blocks/twitter/users/mutes.py b/autogpt_platform/backend/backend/blocks/twitter/users/mutes.py new file mode 100644 index 0000000000..175a39011d --- /dev/null +++ b/autogpt_platform/backend/backend/blocks/twitter/users/mutes.py @@ 
-0,0 +1,328 @@ +from typing import cast + +import tweepy +from tweepy.client import Response + +from backend.blocks.twitter._auth import ( + TEST_CREDENTIALS, + TEST_CREDENTIALS_INPUT, + TwitterCredentials, + TwitterCredentialsField, + TwitterCredentialsInput, +) +from backend.blocks.twitter._builders import UserExpansionsBuilder +from backend.blocks.twitter._serializer import ( + IncludesSerializer, + ResponseDataSerializer, +) +from backend.blocks.twitter._types import ( + TweetFieldsFilter, + TweetUserFieldsFilter, + UserExpansionInputs, + UserExpansionsFilter, +) +from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception +from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema +from backend.data.model import SchemaField + + +class TwitterUnmuteUserBlock(Block): + """ + Allows a user to unmute another user specified by target user ID. + The request succeeds with no action when the user sends a request to a user they're not muting or have already unmuted. + """ + + class Input(BlockSchema): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["users.read", "users.write", "offline.access"] + ) + + target_user_id: str = SchemaField( + description="The user ID of the user that you would like to unmute", + placeholder="Enter target user ID", + ) + + class Output(BlockSchema): + success: bool = SchemaField( + description="Whether the unmute action was successful" + ) + error: str = SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="40458504-a631-11ef-940b-eff92be55422", + description="This block unmutes a specified Twitter user.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterUnmuteUserBlock.Input, + output_schema=TwitterUnmuteUserBlock.Output, + test_input={ + "target_user_id": "12345", + "credentials": TEST_CREDENTIALS_INPUT, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("success", True), + ], + test_mock={"unmute_user": lambda *args, **kwargs: True}, + ) + + @staticmethod + def unmute_user(credentials: TwitterCredentials, target_user_id: str): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + client.unmute(target_user_id=target_user_id, user_auth=False) + + return True + + except tweepy.TweepyException: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + success = self.unmute_user(credentials, input_data.target_user_id) + yield "success", success + + except Exception as e: + yield "error", handle_tweepy_exception(e) + + +class TwitterGetMutedUsersBlock(Block): + """ + Returns a list of users who are muted by the authenticating user + """ + + class Input(UserExpansionInputs): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["users.read", "offline.access"] + ) + + max_results: int | None = SchemaField( + description="The maximum number of results to be returned per page (1-1000). 
Default is 100.", + placeholder="Enter max results", + default=10, + advanced=True, + ) + + pagination_token: str | None = SchemaField( + description="Token to request next/previous page of results", + placeholder="Enter pagination token", + default="", + advanced=True, + ) + + class Output(BlockSchema): + ids: list[str] = SchemaField(description="List of muted user IDs") + usernames: list[str] = SchemaField(description="List of muted usernames") + next_token: str = SchemaField(description="Next token for pagination") + + data: list[dict] = SchemaField(description="Complete user data for muted users") + includes: dict = SchemaField( + description="Additional data requested via expansions" + ) + meta: dict = SchemaField(description="Metadata including pagination info") + + error: str = SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="475024da-a631-11ef-9ccd-f724b8b03cda", + description="This block gets a list of users muted by the authenticating user.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterGetMutedUsersBlock.Input, + output_schema=TwitterGetMutedUsersBlock.Output, + test_input={ + "max_results": 2, + "pagination_token": "", + "credentials": TEST_CREDENTIALS_INPUT, + "expansions": None, + "tweet_fields": None, + "user_fields": None, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("ids", ["12345", "67890"]), + ("usernames", ["testuser1", "testuser2"]), + ( + "data", + [ + {"id": "12345", "username": "testuser1"}, + {"id": "67890", "username": "testuser2"}, + ], + ), + ], + test_mock={ + "get_muted_users": lambda *args, **kwargs: ( + ["12345", "67890"], + ["testuser1", "testuser2"], + [ + {"id": "12345", "username": "testuser1"}, + {"id": "67890", "username": "testuser2"}, + ], + {}, + {}, + None, + ) + }, + ) + + @staticmethod + def get_muted_users( + credentials: TwitterCredentials, + max_results: int | None, + pagination_token: str | None, + expansions: UserExpansionsFilter | None, + tweet_fields: TweetFieldsFilter | None, + user_fields: TweetUserFieldsFilter | None, + ): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + params = { + "max_results": max_results, + "pagination_token": ( + None if pagination_token == "" else pagination_token + ), + "user_auth": False, + } + + params = ( + UserExpansionsBuilder(params) + .add_expansions(expansions) + .add_tweet_fields(tweet_fields) + .add_user_fields(user_fields) + .build() + ) + + response = cast(Response, client.get_muted(**params)) + + meta = {} + user_ids = [] + usernames = [] + next_token = None + + if response.meta: + meta = response.meta + next_token = meta.get("next_token") + + included = IncludesSerializer.serialize(response.includes) + data = ResponseDataSerializer.serialize_list(response.data) + + if response.data: + user_ids = [str(item.id) for item in response.data] + usernames = [item.username for item in response.data] + + return user_ids, usernames, data, included, meta, next_token + + raise Exception("Muted users not found") + + except tweepy.TweepyException: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + ids, usernames, data, includes, meta, next_token = self.get_muted_users( + credentials, + input_data.max_results, + input_data.pagination_token, + input_data.expansions, + input_data.tweet_fields, + input_data.user_fields, + ) + if ids: + yield "ids", ids + if usernames: + yield "usernames", 
usernames + if next_token: + yield "next_token", next_token + if data: + yield "data", data + if includes: + yield "includes", includes + if meta: + yield "meta", meta + except Exception as e: + yield "error", handle_tweepy_exception(e) + + +class TwitterMuteUserBlock(Block): + """ + Allows a user to mute another user specified by target user ID + """ + + class Input(BlockSchema): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["users.read", "users.write", "offline.access"] + ) + + target_user_id: str = SchemaField( + description="The user ID of the user that you would like to mute", + placeholder="Enter target user ID", + ) + + class Output(BlockSchema): + success: bool = SchemaField( + description="Whether the mute action was successful" + ) + error: str = SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="4d1919d0-a631-11ef-90ab-3b73af9ce8f1", + description="This block mutes a specified Twitter user.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterMuteUserBlock.Input, + output_schema=TwitterMuteUserBlock.Output, + test_input={ + "target_user_id": "12345", + "credentials": TEST_CREDENTIALS_INPUT, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("success", True), + ], + test_mock={"mute_user": lambda *args, **kwargs: True}, + ) + + @staticmethod + def mute_user(credentials: TwitterCredentials, target_user_id: str): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + client.mute(target_user_id=target_user_id, user_auth=False) + + return True + + except tweepy.TweepyException: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + success = self.mute_user(credentials, input_data.target_user_id) + yield "success", success + except Exception as e: + yield "error", handle_tweepy_exception(e) diff --git a/autogpt_platform/backend/backend/blocks/twitter/users/user_lookup.py b/autogpt_platform/backend/backend/blocks/twitter/users/user_lookup.py new file mode 100644 index 0000000000..16c207f6dd --- /dev/null +++ b/autogpt_platform/backend/backend/blocks/twitter/users/user_lookup.py @@ -0,0 +1,383 @@ +from typing import Literal, Union, cast + +import tweepy +from pydantic import BaseModel +from tweepy.client import Response + +from backend.blocks.twitter._auth import ( + TEST_CREDENTIALS, + TEST_CREDENTIALS_INPUT, + TwitterCredentials, + TwitterCredentialsField, + TwitterCredentialsInput, +) +from backend.blocks.twitter._builders import UserExpansionsBuilder +from backend.blocks.twitter._serializer import ( + IncludesSerializer, + ResponseDataSerializer, +) +from backend.blocks.twitter._types import ( + TweetFieldsFilter, + TweetUserFieldsFilter, + UserExpansionInputs, + UserExpansionsFilter, +) +from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception +from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema +from backend.data.model import SchemaField + + +class UserId(BaseModel): + discriminator: Literal["user_id"] + user_id: str = SchemaField(description="The ID of the user to lookup", default="") + + +class Username(BaseModel): + discriminator: Literal["username"] + username: str = SchemaField( + description="The Twitter username (handle) of the user", default="" + ) + + +class TwitterGetUserBlock(Block): + """ + Gets information about a single Twitter user specified by ID or username + """ + + class 
Input(UserExpansionInputs): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["users.read", "offline.access"] + ) + + identifier: Union[UserId, Username] = SchemaField( + discriminator="discriminator", + description="Choose whether to identify the user by their unique Twitter ID or by their username", + advanced=False, + ) + + class Output(BlockSchema): + # Common outputs + id: str = SchemaField(description="User ID") + username_: str = SchemaField(description="User username") + name_: str = SchemaField(description="User name") + + # Complete outputs + data: dict = SchemaField(description="Complete user data") + included: dict = SchemaField( + description="Additional data requested via expansions" + ) + error: str = SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="5446db8e-a631-11ef-812a-cf315d373ee9", + description="This block retrieves information about a specified Twitter user.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterGetUserBlock.Input, + output_schema=TwitterGetUserBlock.Output, + test_input={ + "identifier": {"discriminator": "username", "username": "twitter"}, + "credentials": TEST_CREDENTIALS_INPUT, + "expansions": None, + "tweet_fields": None, + "user_fields": None, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("id", "783214"), + ("username_", "twitter"), + ("name_", "Twitter"), + ( + "data", + { + "user": { + "id": "783214", + "username": "twitter", + "name": "Twitter", + } + }, + ), + ], + test_mock={ + "get_user": lambda *args, **kwargs: ( + { + "user": { + "id": "783214", + "username": "twitter", + "name": "Twitter", + } + }, + {}, + "twitter", + "783214", + "Twitter", + ) + }, + ) + + @staticmethod + def get_user( + credentials: TwitterCredentials, + identifier: Union[UserId, Username], + expansions: UserExpansionsFilter | None, + tweet_fields: TweetFieldsFilter | None, + user_fields: TweetUserFieldsFilter | None, + ): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + params = { + "id": identifier.user_id if isinstance(identifier, UserId) else None, + "username": ( + identifier.username if isinstance(identifier, Username) else None + ), + "user_auth": False, + } + + params = ( + UserExpansionsBuilder(params) + .add_expansions(expansions) + .add_tweet_fields(tweet_fields) + .add_user_fields(user_fields) + .build() + ) + + response = cast(Response, client.get_user(**params)) + + username = "" + id = "" + name = "" + + included = IncludesSerializer.serialize(response.includes) + data = ResponseDataSerializer.serialize_dict(response.data) + + if response.data: + username = response.data.username + id = str(response.data.id) + name = response.data.name + + if username and id: + return data, included, username, id, name + else: + raise tweepy.TweepyException("User not found") + + except tweepy.TweepyException: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + data, included, username, id, name = self.get_user( + credentials, + input_data.identifier, + input_data.expansions, + input_data.tweet_fields, + input_data.user_fields, + ) + if id: + yield "id", id + if username: + yield "username_", username + if name: + yield "name_", name + if data: + yield "data", data + if included: + yield "included", included + except Exception as e: + yield "error", handle_tweepy_exception(e) + + +class UserIdList(BaseModel): + discriminator: 
Literal["user_id_list"] + user_ids: list[str] = SchemaField( + description="List of user IDs to lookup (max 100)", + placeholder="Enter user IDs", + default=[], + advanced=False, + ) + + +class UsernameList(BaseModel): + discriminator: Literal["username_list"] + usernames: list[str] = SchemaField( + description="List of Twitter usernames/handles to lookup (max 100)", + placeholder="Enter usernames", + default=[], + advanced=False, + ) + + +class TwitterGetUsersBlock(Block): + """ + Gets information about multiple Twitter users specified by IDs or usernames + """ + + class Input(UserExpansionInputs): + credentials: TwitterCredentialsInput = TwitterCredentialsField( + ["users.read", "offline.access"] + ) + + identifier: Union[UserIdList, UsernameList] = SchemaField( + discriminator="discriminator", + description="Choose whether to identify users by their unique Twitter IDs or by their usernames", + advanced=False, + ) + + class Output(BlockSchema): + # Common outputs + ids: list[str] = SchemaField(description="User IDs") + usernames_: list[str] = SchemaField(description="User usernames") + names_: list[str] = SchemaField(description="User names") + + # Complete outputs + data: list[dict] = SchemaField(description="Complete users data") + included: dict = SchemaField( + description="Additional data requested via expansions" + ) + error: str = SchemaField(description="Error message if the request failed") + + def __init__(self): + super().__init__( + id="5abc857c-a631-11ef-8cfc-f7b79354f7a1", + description="This block retrieves information about multiple Twitter users.", + categories={BlockCategory.SOCIAL}, + input_schema=TwitterGetUsersBlock.Input, + output_schema=TwitterGetUsersBlock.Output, + test_input={ + "identifier": { + "discriminator": "username_list", + "usernames": ["twitter", "twitterdev"], + }, + "credentials": TEST_CREDENTIALS_INPUT, + "expansions": None, + "tweet_fields": None, + "user_fields": None, + }, + test_credentials=TEST_CREDENTIALS, + test_output=[ + ("ids", ["783214", "2244994945"]), + ("usernames_", ["twitter", "twitterdev"]), + ("names_", ["Twitter", "Twitter Dev"]), + ( + "data", + [ + {"id": "783214", "username": "twitter", "name": "Twitter"}, + { + "id": "2244994945", + "username": "twitterdev", + "name": "Twitter Dev", + }, + ], + ), + ], + test_mock={ + "get_users": lambda *args, **kwargs: ( + [ + {"id": "783214", "username": "twitter", "name": "Twitter"}, + { + "id": "2244994945", + "username": "twitterdev", + "name": "Twitter Dev", + }, + ], + {}, + ["twitter", "twitterdev"], + ["783214", "2244994945"], + ["Twitter", "Twitter Dev"], + ) + }, + ) + + @staticmethod + def get_users( + credentials: TwitterCredentials, + identifier: Union[UserIdList, UsernameList], + expansions: UserExpansionsFilter | None, + tweet_fields: TweetFieldsFilter | None, + user_fields: TweetUserFieldsFilter | None, + ): + try: + client = tweepy.Client( + bearer_token=credentials.access_token.get_secret_value() + ) + + params = { + "ids": ( + ",".join(identifier.user_ids) + if isinstance(identifier, UserIdList) + else None + ), + "usernames": ( + ",".join(identifier.usernames) + if isinstance(identifier, UsernameList) + else None + ), + "user_auth": False, + } + + params = ( + UserExpansionsBuilder(params) + .add_expansions(expansions) + .add_tweet_fields(tweet_fields) + .add_user_fields(user_fields) + .build() + ) + + response = cast(Response, client.get_users(**params)) + + usernames = [] + ids = [] + names = [] + + included = IncludesSerializer.serialize(response.includes) + data = 
ResponseDataSerializer.serialize_list(response.data) + + if response.data: + for user in response.data: + usernames.append(user.username) + ids.append(str(user.id)) + names.append(user.name) + + if usernames and ids: + return data, included, usernames, ids, names + else: + raise tweepy.TweepyException("Users not found") + + except tweepy.TweepyException: + raise + + def run( + self, + input_data: Input, + *, + credentials: TwitterCredentials, + **kwargs, + ) -> BlockOutput: + try: + data, included, usernames, ids, names = self.get_users( + credentials, + input_data.identifier, + input_data.expansions, + input_data.tweet_fields, + input_data.user_fields, + ) + if ids: + yield "ids", ids + if usernames: + yield "usernames_", usernames + if names: + yield "names_", names + if data: + yield "data", data + if included: + yield "included", included + except Exception as e: + yield "error", handle_tweepy_exception(e) diff --git a/autogpt_platform/backend/backend/data/model.py b/autogpt_platform/backend/backend/data/model.py index 7271889a7e..3b636ce7ea 100644 --- a/autogpt_platform/backend/backend/data/model.py +++ b/autogpt_platform/backend/backend/data/model.py @@ -226,6 +226,7 @@ class OAuthState(BaseModel): token: str provider: str expires_at: int + code_verifier: Optional[str] = None """Unix timestamp (seconds) indicating when this OAuth state expires""" scopes: list[str] diff --git a/autogpt_platform/backend/backend/integrations/credentials_store.py b/autogpt_platform/backend/backend/integrations/credentials_store.py index 7d539b73c4..a22562de8d 100644 --- a/autogpt_platform/backend/backend/integrations/credentials_store.py +++ b/autogpt_platform/backend/backend/integrations/credentials_store.py @@ -1,6 +1,8 @@ +import base64 +import hashlib import secrets from datetime import datetime, timedelta, timezone -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Optional from pydantic import SecretStr @@ -210,18 +212,24 @@ class IntegrationCredentialsStore: ] self._set_user_integration_creds(user_id, filtered_credentials) - def store_state_token(self, user_id: str, provider: str, scopes: list[str]) -> str: + def store_state_token( + self, user_id: str, provider: str, scopes: list[str], use_pkce: bool = False + ) -> tuple[str, str]: token = secrets.token_urlsafe(32) expires_at = datetime.now(timezone.utc) + timedelta(minutes=10) + (code_challenge, code_verifier) = self._generate_code_challenge() + state = OAuthState( token=token, provider=provider, + code_verifier=code_verifier, expires_at=int(expires_at.timestamp()), scopes=scopes, ) with self.locked_user_integrations(user_id): + user_integrations = self._get_user_integrations(user_id) oauth_states = user_integrations.oauth_states oauth_states.append(state) @@ -231,39 +239,21 @@ class IntegrationCredentialsStore: user_id=user_id, data=user_integrations ) - return token + return token, code_challenge - def get_any_valid_scopes_from_state_token( + def _generate_code_challenge(self) -> tuple[str, str]: + """ + Generate code challenge using SHA256 from the code verifier. 
+ Currently only SHA256 is supported.(In future if we want to support more methods we can add them here) + """ + code_verifier = secrets.token_urlsafe(128) + sha256_hash = hashlib.sha256(code_verifier.encode("utf-8")).digest() + code_challenge = base64.urlsafe_b64encode(sha256_hash).decode("utf-8") + return code_challenge.replace("=", ""), code_verifier + + def verify_state_token( self, user_id: str, token: str, provider: str - ) -> list[str]: - """ - Get the valid scopes from the OAuth state token. This will return any valid scopes - from any OAuth state token for the given provider. If no valid scopes are found, - an empty list is returned. DO NOT RELY ON THIS TOKEN TO AUTHENTICATE A USER, AS IT - IS TO CHECK IF THE USER HAS GIVEN PERMISSIONS TO THE APPLICATION BEFORE EXCHANGING - THE CODE FOR TOKENS. - """ - user_integrations = self._get_user_integrations(user_id) - oauth_states = user_integrations.oauth_states - - now = datetime.now(timezone.utc) - valid_state = next( - ( - state - for state in oauth_states - if state.token == token - and state.provider == provider - and state.expires_at > now.timestamp() - ), - None, - ) - - if valid_state: - return valid_state.scopes - - return [] - - def verify_state_token(self, user_id: str, token: str, provider: str) -> bool: + ) -> Optional[OAuthState]: with self.locked_user_integrations(user_id): user_integrations = self._get_user_integrations(user_id) oauth_states = user_integrations.oauth_states @@ -285,9 +275,9 @@ class IntegrationCredentialsStore: oauth_states.remove(valid_state) user_integrations.oauth_states = oauth_states self.db_manager.update_user_integrations(user_id, user_integrations) - return True + return valid_state - return False + return None def _set_user_integration_creds( self, user_id: str, credentials: list[Credentials] diff --git a/autogpt_platform/backend/backend/integrations/oauth/__init__.py b/autogpt_platform/backend/backend/integrations/oauth/__init__.py index f5888f07a8..ec45189c59 100644 --- a/autogpt_platform/backend/backend/integrations/oauth/__init__.py +++ b/autogpt_platform/backend/backend/integrations/oauth/__init__.py @@ -3,6 +3,7 @@ from typing import TYPE_CHECKING from .github import GitHubOAuthHandler from .google import GoogleOAuthHandler from .notion import NotionOAuthHandler +from .twitter import TwitterOAuthHandler if TYPE_CHECKING: from ..providers import ProviderName @@ -15,6 +16,7 @@ HANDLERS_BY_NAME: dict["ProviderName", type["BaseOAuthHandler"]] = { GitHubOAuthHandler, GoogleOAuthHandler, NotionOAuthHandler, + TwitterOAuthHandler, ] } # --8<-- [end:HANDLERS_BY_NAMEExample] diff --git a/autogpt_platform/backend/backend/integrations/oauth/base.py b/autogpt_platform/backend/backend/integrations/oauth/base.py index 54786ba1aa..fc6c68c161 100644 --- a/autogpt_platform/backend/backend/integrations/oauth/base.py +++ b/autogpt_platform/backend/backend/integrations/oauth/base.py @@ -1,7 +1,7 @@ import logging import time from abc import ABC, abstractmethod -from typing import ClassVar +from typing import ClassVar, Optional from backend.data.model import OAuth2Credentials from backend.integrations.providers import ProviderName @@ -23,7 +23,9 @@ class BaseOAuthHandler(ABC): @abstractmethod # --8<-- [start:BaseOAuthHandler3] - def get_login_url(self, scopes: list[str], state: str) -> str: + def get_login_url( + self, scopes: list[str], state: str, code_challenge: Optional[str] + ) -> str: # --8<-- [end:BaseOAuthHandler3] """Constructs a login URL that the user can be redirected to""" ... 
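
For reference, below is a minimal standalone sketch of the S256 PKCE derivation that `IntegrationCredentialsStore._generate_code_challenge` performs in the hunk above. The `derive_pkce_pair` helper name is illustrative only (it is not part of this PR): a random code verifier is generated, hashed with SHA-256, base64url-encoded, and stripped of `=` padding to form the code challenge. The challenge goes into the authorization URL built by `get_login_url`, while the verifier is kept in the stored `OAuthState` and later handed to `exchange_code_for_tokens`.

```python
import base64
import hashlib
import secrets


def derive_pkce_pair() -> tuple[str, str]:
    """Illustrative helper (not in this PR): returns (code_challenge, code_verifier).

    Mirrors IntegrationCredentialsStore._generate_code_challenge: the verifier is a
    random URL-safe string; the challenge is its SHA-256 digest, base64url-encoded
    with '=' padding removed (the "S256" transformation from RFC 7636).
    """
    code_verifier = secrets.token_urlsafe(128)
    digest = hashlib.sha256(code_verifier.encode("utf-8")).digest()
    code_challenge = base64.urlsafe_b64encode(digest).decode("utf-8").replace("=", "")
    return code_challenge, code_verifier


if __name__ == "__main__":
    challenge, verifier = derive_pkce_pair()
    # The challenge is sent in the login URL (code_challenge=...&code_challenge_method=S256);
    # the verifier stays server-side and is sent later with the token exchange request.
    print(challenge, verifier)
```

Only the verifier needs to be persisted alongside the state token; on callback the provider recomputes the same hash from the submitted verifier to validate the exchange.
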
@@ -31,7 +33,7 @@ class BaseOAuthHandler(ABC): @abstractmethod # --8<-- [start:BaseOAuthHandler4] def exchange_code_for_tokens( - self, code: str, scopes: list[str] + self, code: str, scopes: list[str], code_verifier: Optional[str] ) -> OAuth2Credentials: # --8<-- [end:BaseOAuthHandler4] """Exchanges the acquired authorization code from login for a set of tokens""" diff --git a/autogpt_platform/backend/backend/integrations/oauth/github.py b/autogpt_platform/backend/backend/integrations/oauth/github.py index d83c9b2093..e6b3db37b4 100644 --- a/autogpt_platform/backend/backend/integrations/oauth/github.py +++ b/autogpt_platform/backend/backend/integrations/oauth/github.py @@ -34,7 +34,9 @@ class GitHubOAuthHandler(BaseOAuthHandler): self.token_url = "https://github.com/login/oauth/access_token" self.revoke_url = "https://api.github.com/applications/{client_id}/token" - def get_login_url(self, scopes: list[str], state: str) -> str: + def get_login_url( + self, scopes: list[str], state: str, code_challenge: Optional[str] + ) -> str: params = { "client_id": self.client_id, "redirect_uri": self.redirect_uri, @@ -44,7 +46,7 @@ class GitHubOAuthHandler(BaseOAuthHandler): return f"{self.auth_base_url}?{urlencode(params)}" def exchange_code_for_tokens( - self, code: str, scopes: list[str] + self, code: str, scopes: list[str], code_verifier: Optional[str] ) -> OAuth2Credentials: return self._request_tokens({"code": code, "redirect_uri": self.redirect_uri}) diff --git a/autogpt_platform/backend/backend/integrations/oauth/google.py b/autogpt_platform/backend/backend/integrations/oauth/google.py index 5a03e615a4..310eb5ae73 100644 --- a/autogpt_platform/backend/backend/integrations/oauth/google.py +++ b/autogpt_platform/backend/backend/integrations/oauth/google.py @@ -1,4 +1,5 @@ import logging +from typing import Optional from google.auth.external_account_authorized_user import ( Credentials as ExternalAccountCredentials, @@ -38,7 +39,9 @@ class GoogleOAuthHandler(BaseOAuthHandler): self.token_uri = "https://oauth2.googleapis.com/token" self.revoke_uri = "https://oauth2.googleapis.com/revoke" - def get_login_url(self, scopes: list[str], state: str) -> str: + def get_login_url( + self, scopes: list[str], state: str, code_challenge: Optional[str] + ) -> str: all_scopes = list(set(scopes + self.DEFAULT_SCOPES)) logger.debug(f"Setting up OAuth flow with scopes: {all_scopes}") flow = self._setup_oauth_flow(all_scopes) @@ -52,7 +55,7 @@ class GoogleOAuthHandler(BaseOAuthHandler): return authorization_url def exchange_code_for_tokens( - self, code: str, scopes: list[str] + self, code: str, scopes: list[str], code_verifier: Optional[str] ) -> OAuth2Credentials: logger.debug(f"Exchanging code for tokens with scopes: {scopes}") diff --git a/autogpt_platform/backend/backend/integrations/oauth/notion.py b/autogpt_platform/backend/backend/integrations/oauth/notion.py index e71bae2956..3cd3249fef 100644 --- a/autogpt_platform/backend/backend/integrations/oauth/notion.py +++ b/autogpt_platform/backend/backend/integrations/oauth/notion.py @@ -1,4 +1,5 @@ from base64 import b64encode +from typing import Optional from urllib.parse import urlencode from backend.data.model import OAuth2Credentials @@ -26,7 +27,9 @@ class NotionOAuthHandler(BaseOAuthHandler): self.auth_base_url = "https://api.notion.com/v1/oauth/authorize" self.token_url = "https://api.notion.com/v1/oauth/token" - def get_login_url(self, scopes: list[str], state: str) -> str: + def get_login_url( + self, scopes: list[str], state: str, code_challenge: 
Optional[str] + ) -> str: params = { "client_id": self.client_id, "redirect_uri": self.redirect_uri, @@ -37,7 +40,7 @@ class NotionOAuthHandler(BaseOAuthHandler): return f"{self.auth_base_url}?{urlencode(params)}" def exchange_code_for_tokens( - self, code: str, scopes: list[str] + self, code: str, scopes: list[str], code_verifier: Optional[str] ) -> OAuth2Credentials: request_body = { "grant_type": "authorization_code", diff --git a/autogpt_platform/backend/backend/integrations/oauth/twitter.py b/autogpt_platform/backend/backend/integrations/oauth/twitter.py new file mode 100644 index 0000000000..519ccd354e --- /dev/null +++ b/autogpt_platform/backend/backend/integrations/oauth/twitter.py @@ -0,0 +1,171 @@ +import time +import urllib.parse +from typing import ClassVar, Optional + +import requests + +from backend.data.model import OAuth2Credentials, ProviderName +from backend.integrations.oauth.base import BaseOAuthHandler + + +class TwitterOAuthHandler(BaseOAuthHandler): + PROVIDER_NAME = ProviderName.TWITTER + DEFAULT_SCOPES: ClassVar[list[str]] = [ + "tweet.read", + "tweet.write", + "tweet.moderate.write", + "users.read", + "follows.read", + "follows.write", + "offline.access", + "space.read", + "mute.read", + "mute.write", + "like.read", + "like.write", + "list.read", + "list.write", + "block.read", + "block.write", + "bookmark.read", + "bookmark.write", + ] + + AUTHORIZE_URL = "https://twitter.com/i/oauth2/authorize" + TOKEN_URL = "https://api.x.com/2/oauth2/token" + USERNAME_URL = "https://api.x.com/2/users/me" + REVOKE_URL = "https://api.x.com/2/oauth2/revoke" + + def __init__(self, client_id: str, client_secret: str, redirect_uri: str): + self.client_id = client_id + self.client_secret = client_secret + self.redirect_uri = redirect_uri + + def get_login_url( + self, scopes: list[str], state: str, code_challenge: Optional[str] + ) -> str: + """Generate Twitter OAuth 2.0 authorization URL""" + # scopes = self.handle_default_scopes(scopes) + + if code_challenge is None: + raise ValueError("code_challenge is required for Twitter OAuth") + + params = { + "response_type": "code", + "client_id": self.client_id, + "redirect_uri": self.redirect_uri, + "scope": " ".join(self.DEFAULT_SCOPES), + "state": state, + "code_challenge": code_challenge, + "code_challenge_method": "S256", + } + + return f"{self.AUTHORIZE_URL}?{urllib.parse.urlencode(params)}" + + def exchange_code_for_tokens( + self, code: str, scopes: list[str], code_verifier: Optional[str] + ) -> OAuth2Credentials: + """Exchange authorization code for access tokens""" + + headers = {"Content-Type": "application/x-www-form-urlencoded"} + + data = { + "code": code, + "grant_type": "authorization_code", + "redirect_uri": self.redirect_uri, + "code_verifier": code_verifier, + } + + auth = (self.client_id, self.client_secret) + + response = requests.post(self.TOKEN_URL, headers=headers, data=data, auth=auth) + response.raise_for_status() + + tokens = response.json() + + username = self._get_username(tokens["access_token"]) + + return OAuth2Credentials( + provider=self.PROVIDER_NAME, + title=None, + username=username, + access_token=tokens["access_token"], + refresh_token=tokens.get("refresh_token"), + access_token_expires_at=int(time.time()) + tokens["expires_in"], + refresh_token_expires_at=None, + scopes=scopes, + ) + + def _get_username(self, access_token: str) -> str: + """Get the username from the access token""" + headers = {"Authorization": f"Bearer {access_token}"} + + params = {"user.fields": "username"} + + response = 
requests.get( + f"{self.USERNAME_URL}?{urllib.parse.urlencode(params)}", headers=headers + ) + response.raise_for_status() + + return response.json()["data"]["username"] + + def _refresh_tokens(self, credentials: OAuth2Credentials) -> OAuth2Credentials: + """Refresh access tokens using refresh token""" + if not credentials.refresh_token: + raise ValueError("No refresh token available") + + header = {"Content-Type": "application/x-www-form-urlencoded"} + data = { + "grant_type": "refresh_token", + "refresh_token": credentials.refresh_token.get_secret_value(), + } + + auth = (self.client_id, self.client_secret) + + response = requests.post(self.TOKEN_URL, headers=header, data=data, auth=auth) + + try: + response.raise_for_status() + except requests.exceptions.HTTPError as e: + print("HTTP Error:", e) + print("Response Content:", response.text) + raise + + tokens = response.json() + + username = self._get_username(tokens["access_token"]) + + return OAuth2Credentials( + id=credentials.id, + provider=self.PROVIDER_NAME, + title=None, + username=username, + access_token=tokens["access_token"], + refresh_token=tokens["refresh_token"], + access_token_expires_at=int(time.time()) + tokens["expires_in"], + scopes=credentials.scopes, + refresh_token_expires_at=None, + ) + + def revoke_tokens(self, credentials: OAuth2Credentials) -> bool: + """Revoke the access token""" + + header = {"Content-Type": "application/x-www-form-urlencoded"} + + data = { + "token": credentials.access_token.get_secret_value(), + "token_type_hint": "access_token", + } + + auth = (self.client_id, self.client_secret) + + response = requests.post(self.REVOKE_URL, headers=header, data=data, auth=auth) + + try: + response.raise_for_status() + except requests.exceptions.HTTPError as e: + print("HTTP Error:", e) + print("Response Content:", response.text) + raise + + return response.status_code == 200 diff --git a/autogpt_platform/backend/backend/integrations/providers.py b/autogpt_platform/backend/backend/integrations/providers.py index 291c559d21..d08d50e021 100644 --- a/autogpt_platform/backend/backend/integrations/providers.py +++ b/autogpt_platform/backend/backend/integrations/providers.py @@ -28,5 +28,6 @@ class ProviderName(str, Enum): REPLICATE = "replicate" REVID = "revid" SLANT3D = "slant3d" + TWITTER = "twitter" UNREAL_SPEECH = "unreal_speech" # --8<-- [end:ProviderName] diff --git a/autogpt_platform/backend/backend/server/integrations/router.py b/autogpt_platform/backend/backend/server/integrations/router.py index b4964c790d..6a8c274dd7 100644 --- a/autogpt_platform/backend/backend/server/integrations/router.py +++ b/autogpt_platform/backend/backend/server/integrations/router.py @@ -60,11 +60,12 @@ def login( requested_scopes = scopes.split(",") if scopes else [] # Generate and store a secure random state token along with the scopes - state_token = creds_manager.store.store_state_token( + state_token, code_challenge = creds_manager.store.store_state_token( user_id, provider, requested_scopes ) - - login_url = handler.get_login_url(requested_scopes, state_token) + login_url = handler.get_login_url( + requested_scopes, state_token, code_challenge=code_challenge + ) return LoginResponse(login_url=login_url, state_token=state_token) @@ -92,19 +93,21 @@ def callback( handler = _get_provider_oauth_handler(request, provider) # Verify the state token - if not creds_manager.store.verify_state_token(user_id, state_token, provider): + valid_state = creds_manager.store.verify_state_token(user_id, state_token, provider) + + if not 
valid_state: logger.warning(f"Invalid or expired state token for user {user_id}") raise HTTPException(status_code=400, detail="Invalid or expired state token") - try: - scopes = creds_manager.store.get_any_valid_scopes_from_state_token( - user_id, state_token, provider - ) + scopes = valid_state.scopes logger.debug(f"Retrieved scopes from state token: {scopes}") scopes = handler.handle_default_scopes(scopes) - credentials = handler.exchange_code_for_tokens(code, scopes) + credentials = handler.exchange_code_for_tokens( + code, scopes, valid_state.code_verifier + ) + logger.debug(f"Received credentials with final scopes: {credentials.scopes}") # Check if the granted scopes are sufficient for the requested scopes diff --git a/autogpt_platform/backend/backend/util/settings.py b/autogpt_platform/backend/backend/util/settings.py index 409a18d93e..a68f597b57 100644 --- a/autogpt_platform/backend/backend/util/settings.py +++ b/autogpt_platform/backend/backend/util/settings.py @@ -264,6 +264,10 @@ class Secrets(UpdateTrackingModel["Secrets"], BaseSettings): notion_client_secret: str = Field( default="", description="Notion OAuth client secret" ) + twitter_client_id: str = Field(default="", description="Twitter/X OAuth client ID") + twitter_client_secret: str = Field( + default="", description="Twitter/X OAuth client secret" + ) openai_api_key: str = Field(default="", description="OpenAI API key") anthropic_api_key: str = Field(default="", description="Anthropic API key") diff --git a/autogpt_platform/backend/poetry.lock b/autogpt_platform/backend/poetry.lock index 3bdc617535..be9688866e 100644 --- a/autogpt_platform/backend/poetry.lock +++ b/autogpt_platform/backend/poetry.lock @@ -19,100 +19,100 @@ yarl = "*" [[package]] name = "aiohappyeyeballs" -version = "2.4.4" +version = "2.4.3" description = "Happy Eyeballs for asyncio" optional = false python-versions = ">=3.8" groups = ["main", "dev"] files = [ - {file = "aiohappyeyeballs-2.4.4-py3-none-any.whl", hash = "sha256:a980909d50efcd44795c4afeca523296716d50cd756ddca6af8c65b996e27de8"}, - {file = "aiohappyeyeballs-2.4.4.tar.gz", hash = "sha256:5fdd7d87889c63183afc18ce9271f9b0a7d32c2303e394468dd45d514a757745"}, + {file = "aiohappyeyeballs-2.4.3-py3-none-any.whl", hash = "sha256:8a7a83727b2756f394ab2895ea0765a0a8c475e3c71e98d43d76f22b4b435572"}, + {file = "aiohappyeyeballs-2.4.3.tar.gz", hash = "sha256:75cf88a15106a5002a8eb1dab212525c00d1f4c0fa96e551c9fbe6f09a621586"}, ] [[package]] name = "aiohttp" -version = "3.11.10" +version = "3.11.11" description = "Async http client/server framework (asyncio)" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "aiohttp-3.11.10-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cbad88a61fa743c5d283ad501b01c153820734118b65aee2bd7dbb735475ce0d"}, - {file = "aiohttp-3.11.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:80886dac673ceaef499de2f393fc80bb4481a129e6cb29e624a12e3296cc088f"}, - {file = "aiohttp-3.11.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:61b9bae80ed1f338c42f57c16918853dc51775fb5cb61da70d590de14d8b5fb4"}, - {file = "aiohttp-3.11.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e2e576caec5c6a6b93f41626c9c02fc87cd91538b81a3670b2e04452a63def6"}, - {file = "aiohttp-3.11.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02c13415b5732fb6ee7ff64583a5e6ed1c57aa68f17d2bda79c04888dfdc2769"}, - {file = "aiohttp-3.11.10-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:4cfce37f31f20800a6a6620ce2cdd6737b82e42e06e6e9bd1b36f546feb3c44f"}, - {file = "aiohttp-3.11.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3bbbfff4c679c64e6e23cb213f57cc2c9165c9a65d63717108a644eb5a7398df"}, - {file = "aiohttp-3.11.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49c7dbbc1a559ae14fc48387a115b7d4bbc84b4a2c3b9299c31696953c2a5219"}, - {file = "aiohttp-3.11.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:68386d78743e6570f054fe7949d6cb37ef2b672b4d3405ce91fafa996f7d9b4d"}, - {file = "aiohttp-3.11.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9ef405356ba989fb57f84cac66f7b0260772836191ccefbb987f414bcd2979d9"}, - {file = "aiohttp-3.11.10-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:5d6958671b296febe7f5f859bea581a21c1d05430d1bbdcf2b393599b1cdce77"}, - {file = "aiohttp-3.11.10-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:99b7920e7165be5a9e9a3a7f1b680f06f68ff0d0328ff4079e5163990d046767"}, - {file = "aiohttp-3.11.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0dc49f42422163efb7e6f1df2636fe3db72713f6cd94688e339dbe33fe06d61d"}, - {file = "aiohttp-3.11.10-cp310-cp310-win32.whl", hash = "sha256:40d1c7a7f750b5648642586ba7206999650208dbe5afbcc5284bcec6579c9b91"}, - {file = "aiohttp-3.11.10-cp310-cp310-win_amd64.whl", hash = "sha256:68ff6f48b51bd78ea92b31079817aff539f6c8fc80b6b8d6ca347d7c02384e33"}, - {file = "aiohttp-3.11.10-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:77c4aa15a89847b9891abf97f3d4048f3c2d667e00f8a623c89ad2dccee6771b"}, - {file = "aiohttp-3.11.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:909af95a72cedbefe5596f0bdf3055740f96c1a4baa0dd11fd74ca4de0b4e3f1"}, - {file = "aiohttp-3.11.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:386fbe79863eb564e9f3615b959e28b222259da0c48fd1be5929ac838bc65683"}, - {file = "aiohttp-3.11.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3de34936eb1a647aa919655ff8d38b618e9f6b7f250cc19a57a4bf7fd2062b6d"}, - {file = "aiohttp-3.11.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c9527819b29cd2b9f52033e7fb9ff08073df49b4799c89cb5754624ecd98299"}, - {file = "aiohttp-3.11.10-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65a96e3e03300b41f261bbfd40dfdbf1c301e87eab7cd61c054b1f2e7c89b9e8"}, - {file = "aiohttp-3.11.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98f5635f7b74bcd4f6f72fcd85bea2154b323a9f05226a80bc7398d0c90763b0"}, - {file = "aiohttp-3.11.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:03b6002e20938fc6ee0918c81d9e776bebccc84690e2b03ed132331cca065ee5"}, - {file = "aiohttp-3.11.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6362cc6c23c08d18ddbf0e8c4d5159b5df74fea1a5278ff4f2c79aed3f4e9f46"}, - {file = "aiohttp-3.11.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:3691ed7726fef54e928fe26344d930c0c8575bc968c3e239c2e1a04bd8cf7838"}, - {file = "aiohttp-3.11.10-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31d5093d3acd02b31c649d3a69bb072d539d4c7659b87caa4f6d2bcf57c2fa2b"}, - {file = "aiohttp-3.11.10-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:8b3cf2dc0f0690a33f2d2b2cb15db87a65f1c609f53c37e226f84edb08d10f52"}, - {file = "aiohttp-3.11.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:fbbaea811a2bba171197b08eea288b9402faa2bab2ba0858eecdd0a4105753a3"}, - {file = 
"aiohttp-3.11.10-cp311-cp311-win32.whl", hash = "sha256:4b2c7ac59c5698a7a8207ba72d9e9c15b0fc484a560be0788b31312c2c5504e4"}, - {file = "aiohttp-3.11.10-cp311-cp311-win_amd64.whl", hash = "sha256:974d3a2cce5fcfa32f06b13ccc8f20c6ad9c51802bb7f829eae8a1845c4019ec"}, - {file = "aiohttp-3.11.10-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:b78f053a7ecfc35f0451d961dacdc671f4bcbc2f58241a7c820e9d82559844cf"}, - {file = "aiohttp-3.11.10-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ab7485222db0959a87fbe8125e233b5a6f01f4400785b36e8a7878170d8c3138"}, - {file = "aiohttp-3.11.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cf14627232dfa8730453752e9cdc210966490992234d77ff90bc8dc0dce361d5"}, - {file = "aiohttp-3.11.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:076bc454a7e6fd646bc82ea7f98296be0b1219b5e3ef8a488afbdd8e81fbac50"}, - {file = "aiohttp-3.11.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:482cafb7dc886bebeb6c9ba7925e03591a62ab34298ee70d3dd47ba966370d2c"}, - {file = "aiohttp-3.11.10-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf3d1a519a324af764a46da4115bdbd566b3c73fb793ffb97f9111dbc684fc4d"}, - {file = "aiohttp-3.11.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24213ba85a419103e641e55c27dc7ff03536c4873470c2478cce3311ba1eee7b"}, - {file = "aiohttp-3.11.10-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b99acd4730ad1b196bfb03ee0803e4adac371ae8efa7e1cbc820200fc5ded109"}, - {file = "aiohttp-3.11.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:14cdb5a9570be5a04eec2ace174a48ae85833c2aadc86de68f55541f66ce42ab"}, - {file = "aiohttp-3.11.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:7e97d622cb083e86f18317282084bc9fbf261801b0192c34fe4b1febd9f7ae69"}, - {file = "aiohttp-3.11.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:012f176945af138abc10c4a48743327a92b4ca9adc7a0e078077cdb5dbab7be0"}, - {file = "aiohttp-3.11.10-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44224d815853962f48fe124748227773acd9686eba6dc102578defd6fc99e8d9"}, - {file = "aiohttp-3.11.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c87bf31b7fdab94ae3adbe4a48e711bfc5f89d21cf4c197e75561def39e223bc"}, - {file = "aiohttp-3.11.10-cp312-cp312-win32.whl", hash = "sha256:06a8e2ee1cbac16fe61e51e0b0c269400e781b13bcfc33f5425912391a542985"}, - {file = "aiohttp-3.11.10-cp312-cp312-win_amd64.whl", hash = "sha256:be2b516f56ea883a3e14dda17059716593526e10fb6303189aaf5503937db408"}, - {file = "aiohttp-3.11.10-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8cc5203b817b748adccb07f36390feb730b1bc5f56683445bfe924fc270b8816"}, - {file = "aiohttp-3.11.10-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5ef359ebc6949e3a34c65ce20230fae70920714367c63afd80ea0c2702902ccf"}, - {file = "aiohttp-3.11.10-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9bca390cb247dbfaec3c664326e034ef23882c3f3bfa5fbf0b56cad0320aaca5"}, - {file = "aiohttp-3.11.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:811f23b3351ca532af598405db1093f018edf81368e689d1b508c57dcc6b6a32"}, - {file = "aiohttp-3.11.10-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddf5f7d877615f6a1e75971bfa5ac88609af3b74796ff3e06879e8422729fd01"}, - {file = "aiohttp-3.11.10-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:6ab29b8a0beb6f8eaf1e5049252cfe74adbaafd39ba91e10f18caeb0e99ffb34"}, - {file = "aiohttp-3.11.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c49a76c1038c2dd116fa443eba26bbb8e6c37e924e2513574856de3b6516be99"}, - {file = "aiohttp-3.11.10-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f3dc0e330575f5b134918976a645e79adf333c0a1439dcf6899a80776c9ab39"}, - {file = "aiohttp-3.11.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:efb15a17a12497685304b2d976cb4939e55137df7b09fa53f1b6a023f01fcb4e"}, - {file = "aiohttp-3.11.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:db1d0b28fcb7f1d35600150c3e4b490775251dea70f894bf15c678fdd84eda6a"}, - {file = "aiohttp-3.11.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:15fccaf62a4889527539ecb86834084ecf6e9ea70588efde86e8bc775e0e7542"}, - {file = "aiohttp-3.11.10-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:593c114a2221444f30749cc5e5f4012488f56bd14de2af44fe23e1e9894a9c60"}, - {file = "aiohttp-3.11.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7852bbcb4d0d2f0c4d583f40c3bc750ee033265d80598d0f9cb6f372baa6b836"}, - {file = "aiohttp-3.11.10-cp313-cp313-win32.whl", hash = "sha256:65e55ca7debae8faaffee0ebb4b47a51b4075f01e9b641c31e554fd376595c6c"}, - {file = "aiohttp-3.11.10-cp313-cp313-win_amd64.whl", hash = "sha256:beb39a6d60a709ae3fb3516a1581777e7e8b76933bb88c8f4420d875bb0267c6"}, - {file = "aiohttp-3.11.10-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0580f2e12de2138f34debcd5d88894786453a76e98febaf3e8fe5db62d01c9bf"}, - {file = "aiohttp-3.11.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a55d2ad345684e7c3dd2c20d2f9572e9e1d5446d57200ff630e6ede7612e307f"}, - {file = "aiohttp-3.11.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:04814571cb72d65a6899db6099e377ed00710bf2e3eafd2985166f2918beaf59"}, - {file = "aiohttp-3.11.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e44a9a3c053b90c6f09b1bb4edd880959f5328cf63052503f892c41ea786d99f"}, - {file = "aiohttp-3.11.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:502a1464ccbc800b4b1995b302efaf426e8763fadf185e933c2931df7db9a199"}, - {file = "aiohttp-3.11.10-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:613e5169f8ae77b1933e42e418a95931fb4867b2991fc311430b15901ed67079"}, - {file = "aiohttp-3.11.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cca22a61b7fe45da8fc73c3443150c3608750bbe27641fc7558ec5117b27fdf"}, - {file = "aiohttp-3.11.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:86a5dfcc39309470bd7b68c591d84056d195428d5d2e0b5ccadfbaf25b026ebc"}, - {file = "aiohttp-3.11.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:77ae58586930ee6b2b6f696c82cf8e78c8016ec4795c53e36718365f6959dc82"}, - {file = "aiohttp-3.11.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:78153314f26d5abef3239b4a9af20c229c6f3ecb97d4c1c01b22c4f87669820c"}, - {file = "aiohttp-3.11.10-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:98283b94cc0e11c73acaf1c9698dea80c830ca476492c0fe2622bd931f34b487"}, - {file = "aiohttp-3.11.10-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:53bf2097e05c2accc166c142a2090e4c6fd86581bde3fd9b2d3f9e93dda66ac1"}, - {file = "aiohttp-3.11.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c5532f0441fc09c119e1dca18fbc0687e64fbeb45aa4d6a87211ceaee50a74c4"}, - {file = 
"aiohttp-3.11.10-cp39-cp39-win32.whl", hash = "sha256:47ad15a65fb41c570cd0ad9a9ff8012489e68176e7207ec7b82a0940dddfd8be"}, - {file = "aiohttp-3.11.10-cp39-cp39-win_amd64.whl", hash = "sha256:c6b9e6d7e41656d78e37ce754813fa44b455c3d0d0dced2a047def7dc5570b74"}, - {file = "aiohttp-3.11.10.tar.gz", hash = "sha256:b1fc6b45010a8d0ff9e88f9f2418c6fd408c99c211257334aff41597ebece42e"}, + {file = "aiohttp-3.11.11-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a60804bff28662cbcf340a4d61598891f12eea3a66af48ecfdc975ceec21e3c8"}, + {file = "aiohttp-3.11.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4b4fa1cb5f270fb3eab079536b764ad740bb749ce69a94d4ec30ceee1b5940d5"}, + {file = "aiohttp-3.11.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:731468f555656767cda219ab42e033355fe48c85fbe3ba83a349631541715ba2"}, + {file = "aiohttp-3.11.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb23d8bb86282b342481cad4370ea0853a39e4a32a0042bb52ca6bdde132df43"}, + {file = "aiohttp-3.11.11-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f047569d655f81cb70ea5be942ee5d4421b6219c3f05d131f64088c73bb0917f"}, + {file = "aiohttp-3.11.11-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd7659baae9ccf94ae5fe8bfaa2c7bc2e94d24611528395ce88d009107e00c6d"}, + {file = "aiohttp-3.11.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af01e42ad87ae24932138f154105e88da13ce7d202a6de93fafdafb2883a00ef"}, + {file = "aiohttp-3.11.11-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5854be2f3e5a729800bac57a8d76af464e160f19676ab6aea74bde18ad19d438"}, + {file = "aiohttp-3.11.11-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6526e5fb4e14f4bbf30411216780c9967c20c5a55f2f51d3abd6de68320cc2f3"}, + {file = "aiohttp-3.11.11-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:85992ee30a31835fc482468637b3e5bd085fa8fe9392ba0bdcbdc1ef5e9e3c55"}, + {file = "aiohttp-3.11.11-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:88a12ad8ccf325a8a5ed80e6d7c3bdc247d66175afedbe104ee2aaca72960d8e"}, + {file = "aiohttp-3.11.11-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:0a6d3fbf2232e3a08c41eca81ae4f1dff3d8f1a30bae415ebe0af2d2458b8a33"}, + {file = "aiohttp-3.11.11-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:84a585799c58b795573c7fa9b84c455adf3e1d72f19a2bf498b54a95ae0d194c"}, + {file = "aiohttp-3.11.11-cp310-cp310-win32.whl", hash = "sha256:bfde76a8f430cf5c5584553adf9926534352251d379dcb266ad2b93c54a29745"}, + {file = "aiohttp-3.11.11-cp310-cp310-win_amd64.whl", hash = "sha256:0fd82b8e9c383af11d2b26f27a478640b6b83d669440c0a71481f7c865a51da9"}, + {file = "aiohttp-3.11.11-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ba74ec819177af1ef7f59063c6d35a214a8fde6f987f7661f4f0eecc468a8f76"}, + {file = "aiohttp-3.11.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4af57160800b7a815f3fe0eba9b46bf28aafc195555f1824555fa2cfab6c1538"}, + {file = "aiohttp-3.11.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ffa336210cf9cd8ed117011085817d00abe4c08f99968deef0013ea283547204"}, + {file = "aiohttp-3.11.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81b8fe282183e4a3c7a1b72f5ade1094ed1c6345a8f153506d114af5bf8accd9"}, + {file = "aiohttp-3.11.11-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3af41686ccec6a0f2bdc66686dc0f403c41ac2089f80e2214a0f82d001052c03"}, + {file = 
"aiohttp-3.11.11-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70d1f9dde0e5dd9e292a6d4d00058737052b01f3532f69c0c65818dac26dc287"}, + {file = "aiohttp-3.11.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:249cc6912405917344192b9f9ea5cd5b139d49e0d2f5c7f70bdfaf6b4dbf3a2e"}, + {file = "aiohttp-3.11.11-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0eb98d90b6690827dcc84c246811feeb4e1eea683c0eac6caed7549be9c84665"}, + {file = "aiohttp-3.11.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ec82bf1fda6cecce7f7b915f9196601a1bd1a3079796b76d16ae4cce6d0ef89b"}, + {file = "aiohttp-3.11.11-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9fd46ce0845cfe28f108888b3ab17abff84ff695e01e73657eec3f96d72eef34"}, + {file = "aiohttp-3.11.11-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:bd176afcf8f5d2aed50c3647d4925d0db0579d96f75a31e77cbaf67d8a87742d"}, + {file = "aiohttp-3.11.11-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:ec2aa89305006fba9ffb98970db6c8221541be7bee4c1d027421d6f6df7d1ce2"}, + {file = "aiohttp-3.11.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:92cde43018a2e17d48bb09c79e4d4cb0e236de5063ce897a5e40ac7cb4878773"}, + {file = "aiohttp-3.11.11-cp311-cp311-win32.whl", hash = "sha256:aba807f9569455cba566882c8938f1a549f205ee43c27b126e5450dc9f83cc62"}, + {file = "aiohttp-3.11.11-cp311-cp311-win_amd64.whl", hash = "sha256:ae545f31489548c87b0cced5755cfe5a5308d00407000e72c4fa30b19c3220ac"}, + {file = "aiohttp-3.11.11-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e595c591a48bbc295ebf47cb91aebf9bd32f3ff76749ecf282ea7f9f6bb73886"}, + {file = "aiohttp-3.11.11-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3ea1b59dc06396b0b424740a10a0a63974c725b1c64736ff788a3689d36c02d2"}, + {file = "aiohttp-3.11.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8811f3f098a78ffa16e0ea36dffd577eb031aea797cbdba81be039a4169e242c"}, + {file = "aiohttp-3.11.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7227b87a355ce1f4bf83bfae4399b1f5bb42e0259cb9405824bd03d2f4336a"}, + {file = "aiohttp-3.11.11-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d40f9da8cabbf295d3a9dae1295c69975b86d941bc20f0a087f0477fa0a66231"}, + {file = "aiohttp-3.11.11-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ffb3dc385f6bb1568aa974fe65da84723210e5d9707e360e9ecb51f59406cd2e"}, + {file = "aiohttp-3.11.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8f5f7515f3552d899c61202d99dcb17d6e3b0de777900405611cd747cecd1b8"}, + {file = "aiohttp-3.11.11-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3499c7ffbfd9c6a3d8d6a2b01c26639da7e43d47c7b4f788016226b1e711caa8"}, + {file = "aiohttp-3.11.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8e2bf8029dbf0810c7bfbc3e594b51c4cc9101fbffb583a3923aea184724203c"}, + {file = "aiohttp-3.11.11-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b6212a60e5c482ef90f2d788835387070a88d52cf6241d3916733c9176d39eab"}, + {file = "aiohttp-3.11.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:d119fafe7b634dbfa25a8c597718e69a930e4847f0b88e172744be24515140da"}, + {file = "aiohttp-3.11.11-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:6fba278063559acc730abf49845d0e9a9e1ba74f85f0ee6efd5803f08b285853"}, + {file = "aiohttp-3.11.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:92fc484e34b733704ad77210c7957679c5c3877bd1e6b6d74b185e9320cc716e"}, + {file = "aiohttp-3.11.11-cp312-cp312-win32.whl", hash = "sha256:9f5b3c1ed63c8fa937a920b6c1bec78b74ee09593b3f5b979ab2ae5ef60d7600"}, + {file = "aiohttp-3.11.11-cp312-cp312-win_amd64.whl", hash = "sha256:1e69966ea6ef0c14ee53ef7a3d68b564cc408121ea56c0caa2dc918c1b2f553d"}, + {file = "aiohttp-3.11.11-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:541d823548ab69d13d23730a06f97460f4238ad2e5ed966aaf850d7c369782d9"}, + {file = "aiohttp-3.11.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:929f3ed33743a49ab127c58c3e0a827de0664bfcda566108989a14068f820194"}, + {file = "aiohttp-3.11.11-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0882c2820fd0132240edbb4a51eb8ceb6eef8181db9ad5291ab3332e0d71df5f"}, + {file = "aiohttp-3.11.11-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b63de12e44935d5aca7ed7ed98a255a11e5cb47f83a9fded7a5e41c40277d104"}, + {file = "aiohttp-3.11.11-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa54f8ef31d23c506910c21163f22b124facb573bff73930735cf9fe38bf7dff"}, + {file = "aiohttp-3.11.11-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a344d5dc18074e3872777b62f5f7d584ae4344cd6006c17ba12103759d407af3"}, + {file = "aiohttp-3.11.11-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b7fb429ab1aafa1f48578eb315ca45bd46e9c37de11fe45c7f5f4138091e2f1"}, + {file = "aiohttp-3.11.11-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c341c7d868750e31961d6d8e60ff040fb9d3d3a46d77fd85e1ab8e76c3e9a5c4"}, + {file = "aiohttp-3.11.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ed9ee95614a71e87f1a70bc81603f6c6760128b140bc4030abe6abaa988f1c3d"}, + {file = "aiohttp-3.11.11-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:de8d38f1c2810fa2a4f1d995a2e9c70bb8737b18da04ac2afbf3971f65781d87"}, + {file = "aiohttp-3.11.11-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:a9b7371665d4f00deb8f32208c7c5e652059b0fda41cf6dbcac6114a041f1cc2"}, + {file = "aiohttp-3.11.11-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:620598717fce1b3bd14dd09947ea53e1ad510317c85dda2c9c65b622edc96b12"}, + {file = "aiohttp-3.11.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:bf8d9bfee991d8acc72d060d53860f356e07a50f0e0d09a8dfedea1c554dd0d5"}, + {file = "aiohttp-3.11.11-cp313-cp313-win32.whl", hash = "sha256:9d73ee3725b7a737ad86c2eac5c57a4a97793d9f442599bea5ec67ac9f4bdc3d"}, + {file = "aiohttp-3.11.11-cp313-cp313-win_amd64.whl", hash = "sha256:c7a06301c2fb096bdb0bd25fe2011531c1453b9f2c163c8031600ec73af1cc99"}, + {file = "aiohttp-3.11.11-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3e23419d832d969f659c208557de4a123e30a10d26e1e14b73431d3c13444c2e"}, + {file = "aiohttp-3.11.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:21fef42317cf02e05d3b09c028712e1d73a9606f02467fd803f7c1f39cc59add"}, + {file = "aiohttp-3.11.11-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1f21bb8d0235fc10c09ce1d11ffbd40fc50d3f08a89e4cf3a0c503dc2562247a"}, + {file = "aiohttp-3.11.11-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1642eceeaa5ab6c9b6dfeaaa626ae314d808188ab23ae196a34c9d97efb68350"}, + {file = "aiohttp-3.11.11-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2170816e34e10f2fd120f603e951630f8a112e1be3b60963a1f159f5699059a6"}, + {file = 
"aiohttp-3.11.11-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8be8508d110d93061197fd2d6a74f7401f73b6d12f8822bbcd6d74f2b55d71b1"}, + {file = "aiohttp-3.11.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4eed954b161e6b9b65f6be446ed448ed3921763cc432053ceb606f89d793927e"}, + {file = "aiohttp-3.11.11-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6c9af134da4bc9b3bd3e6a70072509f295d10ee60c697826225b60b9959acdd"}, + {file = "aiohttp-3.11.11-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:44167fc6a763d534a6908bdb2592269b4bf30a03239bcb1654781adf5e49caf1"}, + {file = "aiohttp-3.11.11-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:479b8c6ebd12aedfe64563b85920525d05d394b85f166b7873c8bde6da612f9c"}, + {file = "aiohttp-3.11.11-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:10b4ff0ad793d98605958089fabfa350e8e62bd5d40aa65cdc69d6785859f94e"}, + {file = "aiohttp-3.11.11-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:b540bd67cfb54e6f0865ceccd9979687210d7ed1a1cc8c01f8e67e2f1e883d28"}, + {file = "aiohttp-3.11.11-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1dac54e8ce2ed83b1f6b1a54005c87dfed139cf3f777fdc8afc76e7841101226"}, + {file = "aiohttp-3.11.11-cp39-cp39-win32.whl", hash = "sha256:568c1236b2fde93b7720f95a890741854c1200fba4a3471ff48b2934d2d93fd3"}, + {file = "aiohttp-3.11.11-cp39-cp39-win_amd64.whl", hash = "sha256:943a8b052e54dfd6439fd7989f67fc6a7f2138d0a2cf0a7de5f18aa4fe7eb3b1"}, + {file = "aiohttp-3.11.11.tar.gz", hash = "sha256:bb49c7f1e6ebf3821a42d81d494f538107610c3a705987f53068546b0e90303e"}, ] [package.dependencies] @@ -198,25 +198,25 @@ vertex = ["google-auth (>=2,<3)"] [[package]] name = "anyio" -version = "4.7.0" +version = "4.6.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.9" groups = ["main", "dev"] files = [ - {file = "anyio-4.7.0-py3-none-any.whl", hash = "sha256:ea60c3723ab42ba6fff7e8ccb0488c898ec538ff4df1f1d5e642c3601d07e352"}, - {file = "anyio-4.7.0.tar.gz", hash = "sha256:2f834749c602966b7d456a7567cafcb309f96482b5081d14ac93ccd457f9dd48"}, + {file = "anyio-4.6.0-py3-none-any.whl", hash = "sha256:c7d2e9d63e31599eeb636c8c5c03a7e108d73b345f064f1c19fdc87b79036a9a"}, + {file = "anyio-4.6.0.tar.gz", hash = "sha256:137b4559cbb034c477165047febb6ff83f390fc3b20bf181c1fc0a728cb8beeb"}, ] [package.dependencies] exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} idna = ">=2.8" sniffio = ">=1.1" -typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} +typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} [package.extras] -doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"] +doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.21.0b1)"] trio = ["trio (>=0.26.1)"] [[package]] @@ -249,15 +249,15 @@ zookeeper = ["kazoo"] [[package]] name = "async-timeout" -version = "5.0.1" +version = "4.0.3" description = "Timeout 
context manager for asyncio programs" optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" groups = ["main"] markers = "python_full_version < \"3.11.3\"" files = [ - {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}, - {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}, + {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, + {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, ] [[package]] @@ -460,117 +460,102 @@ pycparser = "*" [[package]] name = "charset-normalizer" -version = "3.4.0" +version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7.0" groups = ["main", "dev"] files = [ - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, - {file = 
"charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, - {file = 
"charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, - {file = 
"charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, - {file = 
"charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, - {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, - {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + 
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, ] [[package]] @@ -731,20 +716,20 @@ files = [ [[package]] name = "e2b" -version = "1.0.5" +version = "1.0.4" description = "E2B SDK that give agents cloud environments" optional = false python-versions = "<4.0,>=3.8" groups = ["main"] files = [ - {file = "e2b-1.0.5-py3-none-any.whl", hash = "sha256:a71bdec46f33d3e38e87d475d7fd2939bd7b6b753b819c9639ca211cd375b79e"}, - {file = "e2b-1.0.5.tar.gz", hash = "sha256:43c82705af7b7d4415c2510ff77dab4dc075351e0b769d6adf8e0d7bb4868d13"}, + {file = "e2b-1.0.4-py3-none-any.whl", hash = "sha256:1a9c765eb1b2cc291c5ebd3f2e268f8fba9471a12f470f4651395b5753730170"}, + {file = "e2b-1.0.4.tar.gz", hash = "sha256:5ed3db4f984e52cf3aabb717725493ff060a8374b7c878b31bceeff46a0b5648"}, ] [package.dependencies] attrs = ">=23.2.0" httpcore = ">=1.0.5,<2.0.0" -httpx = ">=0.27.0,<1.0.0" +httpx = ">=0.27.0,<0.28.0" packaging = ">=24.1" protobuf = ">=3.20.0,<6.0.0" python-dateutil = ">=2.8.2" @@ -752,20 +737,20 @@ typing-extensions = ">=4.1.0" [[package]] name = "e2b-code-interpreter" -version = "1.0.3" +version = "1.0.1" description = "E2B Code Interpreter - Stateful code execution" optional = false python-versions = "<4.0,>=3.8" groups = ["main"] files = [ - {file = "e2b_code_interpreter-1.0.3-py3-none-any.whl", hash = "sha256:c638bd4ec1c99d9c4eaac541bc8b15134cf786f6c7c400d979cef96d62e485d8"}, - {file = "e2b_code_interpreter-1.0.3.tar.gz", hash = "sha256:36475acc001b1317ed129d65970fce6a7cc2d50e3fd3e8a13ad5d7d3e0fac237"}, + {file = "e2b_code_interpreter-1.0.1-py3-none-any.whl", hash = "sha256:e27c40174ba7daac4942388611a73e1ac58300227f0ba6c0555ee54507d4944c"}, + {file = "e2b_code_interpreter-1.0.1.tar.gz", hash = "sha256:b0c061e41315d21514affe78f80052be335b687204e669dd7ca852b59eeaaea2"}, ] [package.dependencies] attrs = ">=21.3.0" -e2b = ">=1.0.4,<2.0.0" -httpx = ">=0.20.0,<1.0.0" +e2b = ">=1.0.0,<2.0.0" +httpx = ">=0.20.0,<0.28.0" [[package]] name = "exceptiongroup" @@ -816,14 +801,14 @@ typing-extensions = "*" [[package]] name = "fastapi" -version = "0.115.6" +version = "0.115.5" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional 
= false python-versions = ">=3.8" groups = ["main"] files = [ - {file = "fastapi-0.115.6-py3-none-any.whl", hash = "sha256:e9240b29e36fa8f4bb7290316988e90c381e5092e0cbe84e7818cc3713bcf305"}, - {file = "fastapi-0.115.6.tar.gz", hash = "sha256:9ec46f7addc14ea472958a96aae5b5de65f39721a46aaf5705c480d9a8b76654"}, + {file = "fastapi-0.115.5-py3-none-any.whl", hash = "sha256:596b95adbe1474da47049e802f9a65ab2ffa9c2b07e7efee70eb8a66c9f2f796"}, + {file = "fastapi-0.115.5.tar.gz", hash = "sha256:0e7a4d0dc0d01c68df21887cce0945e72d3c48b9f4f79dfe7a7d53aa08fbb289"}, ] [package.dependencies] @@ -869,104 +854,89 @@ pyflakes = ">=3.2.0,<3.3.0" [[package]] name = "frozenlist" -version = "1.5.0" +version = "1.4.1" description = "A list-like structure which implements collections.abc.MutableSequence" optional = false python-versions = ">=3.8" groups = ["main"] files = [ - {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5b6a66c18b5b9dd261ca98dffcb826a525334b2f29e7caa54e182255c5f6a65a"}, - {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d1b3eb7b05ea246510b43a7e53ed1653e55c2121019a97e60cad7efb881a97bb"}, - {file = "frozenlist-1.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:15538c0cbf0e4fa11d1e3a71f823524b0c46299aed6e10ebb4c2089abd8c3bec"}, - {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e79225373c317ff1e35f210dd5f1344ff31066ba8067c307ab60254cd3a78ad5"}, - {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9272fa73ca71266702c4c3e2d4a28553ea03418e591e377a03b8e3659d94fa76"}, - {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:498524025a5b8ba81695761d78c8dd7382ac0b052f34e66939c42df860b8ff17"}, - {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:92b5278ed9d50fe610185ecd23c55d8b307d75ca18e94c0e7de328089ac5dcba"}, - {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f3c8c1dacd037df16e85227bac13cca58c30da836c6f936ba1df0c05d046d8d"}, - {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f2ac49a9bedb996086057b75bf93538240538c6d9b38e57c82d51f75a73409d2"}, - {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e66cc454f97053b79c2ab09c17fbe3c825ea6b4de20baf1be28919460dd7877f"}, - {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:5a3ba5f9a0dfed20337d3e966dc359784c9f96503674c2faf015f7fe8e96798c"}, - {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6321899477db90bdeb9299ac3627a6a53c7399c8cd58d25da094007402b039ab"}, - {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:76e4753701248476e6286f2ef492af900ea67d9706a0155335a40ea21bf3b2f5"}, - {file = "frozenlist-1.5.0-cp310-cp310-win32.whl", hash = "sha256:977701c081c0241d0955c9586ffdd9ce44f7a7795df39b9151cd9a6fd0ce4cfb"}, - {file = "frozenlist-1.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:189f03b53e64144f90990d29a27ec4f7997d91ed3d01b51fa39d2dbe77540fd4"}, - {file = "frozenlist-1.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fd74520371c3c4175142d02a976aee0b4cb4a7cc912a60586ffd8d5929979b30"}, - {file = "frozenlist-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:2f3f7a0fbc219fb4455264cae4d9f01ad41ae6ee8524500f381de64ffaa077d5"}, - {file = "frozenlist-1.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f47c9c9028f55a04ac254346e92977bf0f166c483c74b4232bee19a6697e4778"}, - {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0996c66760924da6e88922756d99b47512a71cfd45215f3570bf1e0b694c206a"}, - {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2fe128eb4edeabe11896cb6af88fca5346059f6c8d807e3b910069f39157869"}, - {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a8ea951bbb6cacd492e3948b8da8c502a3f814f5d20935aae74b5df2b19cf3d"}, - {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de537c11e4aa01d37db0d403b57bd6f0546e71a82347a97c6a9f0dcc532b3a45"}, - {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c2623347b933fcb9095841f1cc5d4ff0b278addd743e0e966cb3d460278840d"}, - {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cee6798eaf8b1416ef6909b06f7dc04b60755206bddc599f52232606e18179d3"}, - {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f5f9da7f5dbc00a604fe74aa02ae7c98bcede8a3b8b9666f9f86fc13993bc71a"}, - {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:90646abbc7a5d5c7c19461d2e3eeb76eb0b204919e6ece342feb6032c9325ae9"}, - {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:bdac3c7d9b705d253b2ce370fde941836a5f8b3c5c2b8fd70940a3ea3af7f4f2"}, - {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03d33c2ddbc1816237a67f66336616416e2bbb6beb306e5f890f2eb22b959cdf"}, - {file = "frozenlist-1.5.0-cp311-cp311-win32.whl", hash = "sha256:237f6b23ee0f44066219dae14c70ae38a63f0440ce6750f868ee08775073f942"}, - {file = "frozenlist-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:0cc974cc93d32c42e7b0f6cf242a6bd941c57c61b618e78b6c0a96cb72788c1d"}, - {file = "frozenlist-1.5.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:31115ba75889723431aa9a4e77d5f398f5cf976eea3bdf61749731f62d4a4a21"}, - {file = "frozenlist-1.5.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7437601c4d89d070eac8323f121fcf25f88674627505334654fd027b091db09d"}, - {file = "frozenlist-1.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7948140d9f8ece1745be806f2bfdf390127cf1a763b925c4a805c603df5e697e"}, - {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feeb64bc9bcc6b45c6311c9e9b99406660a9c05ca8a5b30d14a78555088b0b3a"}, - {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:683173d371daad49cffb8309779e886e59c2f369430ad28fe715f66d08d4ab1a"}, - {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7d57d8f702221405a9d9b40f9da8ac2e4a1a8b5285aac6100f3393675f0a85ee"}, - {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30c72000fbcc35b129cb09956836c7d7abf78ab5416595e4857d1cae8d6251a6"}, - {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:000a77d6034fbad9b6bb880f7ec073027908f1b40254b5d6f26210d2dab1240e"}, - {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5d7f5a50342475962eb18b740f3beecc685a15b52c91f7d975257e13e029eca9"}, - {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:87f724d055eb4785d9be84e9ebf0f24e392ddfad00b3fe036e43f489fafc9039"}, - {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:6e9080bb2fb195a046e5177f10d9d82b8a204c0736a97a153c2466127de87784"}, - {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9b93d7aaa36c966fa42efcaf716e6b3900438632a626fb09c049f6a2f09fc631"}, - {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:52ef692a4bc60a6dd57f507429636c2af8b6046db8b31b18dac02cbc8f507f7f"}, - {file = "frozenlist-1.5.0-cp312-cp312-win32.whl", hash = "sha256:29d94c256679247b33a3dc96cce0f93cbc69c23bf75ff715919332fdbb6a32b8"}, - {file = "frozenlist-1.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:8969190d709e7c48ea386db202d708eb94bdb29207a1f269bab1196ce0dcca1f"}, - {file = "frozenlist-1.5.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7a1a048f9215c90973402e26c01d1cff8a209e1f1b53f72b95c13db61b00f953"}, - {file = "frozenlist-1.5.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dd47a5181ce5fcb463b5d9e17ecfdb02b678cca31280639255ce9d0e5aa67af0"}, - {file = "frozenlist-1.5.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1431d60b36d15cda188ea222033eec8e0eab488f39a272461f2e6d9e1a8e63c2"}, - {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6482a5851f5d72767fbd0e507e80737f9c8646ae7fd303def99bfe813f76cf7f"}, - {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:44c49271a937625619e862baacbd037a7ef86dd1ee215afc298a417ff3270608"}, - {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:12f78f98c2f1c2429d42e6a485f433722b0061d5c0b0139efa64f396efb5886b"}, - {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce3aa154c452d2467487765e3adc730a8c153af77ad84096bc19ce19a2400840"}, - {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b7dc0c4338e6b8b091e8faf0db3168a37101943e687f373dce00959583f7439"}, - {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:45e0896250900b5aa25180f9aec243e84e92ac84bd4a74d9ad4138ef3f5c97de"}, - {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:561eb1c9579d495fddb6da8959fd2a1fca2c6d060d4113f5844b433fc02f2641"}, - {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:df6e2f325bfee1f49f81aaac97d2aa757c7646534a06f8f577ce184afe2f0a9e"}, - {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:140228863501b44b809fb39ec56b5d4071f4d0aa6d216c19cbb08b8c5a7eadb9"}, - {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7707a25d6a77f5d27ea7dc7d1fc608aa0a478193823f88511ef5e6b8a48f9d03"}, - {file = "frozenlist-1.5.0-cp313-cp313-win32.whl", hash = "sha256:31a9ac2b38ab9b5a8933b693db4939764ad3f299fcaa931a3e605bc3460e693c"}, - {file = "frozenlist-1.5.0-cp313-cp313-win_amd64.whl", hash = "sha256:11aabdd62b8b9c4b84081a3c246506d1cddd2dd93ff0ad53ede5defec7886b28"}, - {file = 
"frozenlist-1.5.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:dd94994fc91a6177bfaafd7d9fd951bc8689b0a98168aa26b5f543868548d3ca"}, - {file = "frozenlist-1.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d0da8bbec082bf6bf18345b180958775363588678f64998c2b7609e34719b10"}, - {file = "frozenlist-1.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:73f2e31ea8dd7df61a359b731716018c2be196e5bb3b74ddba107f694fbd7604"}, - {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:828afae9f17e6de596825cf4228ff28fbdf6065974e5ac1410cecc22f699d2b3"}, - {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1577515d35ed5649d52ab4319db757bb881ce3b2b796d7283e6634d99ace307"}, - {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2150cc6305a2c2ab33299453e2968611dacb970d2283a14955923062c8d00b10"}, - {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a72b7a6e3cd2725eff67cd64c8f13335ee18fc3c7befc05aed043d24c7b9ccb9"}, - {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c16d2fa63e0800723139137d667e1056bee1a1cf7965153d2d104b62855e9b99"}, - {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:17dcc32fc7bda7ce5875435003220a457bcfa34ab7924a49a1c19f55b6ee185c"}, - {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:97160e245ea33d8609cd2b8fd997c850b56db147a304a262abc2b3be021a9171"}, - {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f1e6540b7fa044eee0bb5111ada694cf3dc15f2b0347ca125ee9ca984d5e9e6e"}, - {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:91d6c171862df0a6c61479d9724f22efb6109111017c87567cfeb7b5d1449fdf"}, - {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c1fac3e2ace2eb1052e9f7c7db480818371134410e1f5c55d65e8f3ac6d1407e"}, - {file = "frozenlist-1.5.0-cp38-cp38-win32.whl", hash = "sha256:b97f7b575ab4a8af9b7bc1d2ef7f29d3afee2226bd03ca3875c16451ad5a7723"}, - {file = "frozenlist-1.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:374ca2dabdccad8e2a76d40b1d037f5bd16824933bf7bcea3e59c891fd4a0923"}, - {file = "frozenlist-1.5.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9bbcdfaf4af7ce002694a4e10a0159d5a8d20056a12b05b45cea944a4953f972"}, - {file = "frozenlist-1.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1893f948bf6681733aaccf36c5232c231e3b5166d607c5fa77773611df6dc336"}, - {file = "frozenlist-1.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2b5e23253bb709ef57a8e95e6ae48daa9ac5f265637529e4ce6b003a37b2621f"}, - {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f253985bb515ecd89629db13cb58d702035ecd8cfbca7d7a7e29a0e6d39af5f"}, - {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:04a5c6babd5e8fb7d3c871dc8b321166b80e41b637c31a995ed844a6139942b6"}, - {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9fe0f1c29ba24ba6ff6abf688cb0b7cf1efab6b6aa6adc55441773c252f7411"}, - {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:226d72559fa19babe2ccd920273e767c96a49b9d3d38badd7c91a0fdeda8ea08"}, - {file = 
"frozenlist-1.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15b731db116ab3aedec558573c1a5eec78822b32292fe4f2f0345b7f697745c2"}, - {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:366d8f93e3edfe5a918c874702f78faac300209a4d5bf38352b2c1bdc07a766d"}, - {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1b96af8c582b94d381a1c1f51ffaedeb77c821c690ea5f01da3d70a487dd0a9b"}, - {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:c03eff4a41bd4e38415cbed054bbaff4a075b093e2394b6915dca34a40d1e38b"}, - {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:50cf5e7ee9b98f22bdecbabf3800ae78ddcc26e4a435515fc72d97903e8488e0"}, - {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1e76bfbc72353269c44e0bc2cfe171900fbf7f722ad74c9a7b638052afe6a00c"}, - {file = "frozenlist-1.5.0-cp39-cp39-win32.whl", hash = "sha256:666534d15ba8f0fda3f53969117383d5dc021266b3c1a42c9ec4855e4b58b9d3"}, - {file = "frozenlist-1.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:5c28f4b5dbef8a0d8aad0d4de24d1e9e981728628afaf4ea0792f5d0939372f0"}, - {file = "frozenlist-1.5.0-py3-none-any.whl", hash = "sha256:d994863bba198a4a518b467bb971c56e1db3f180a25c6cf7bb1949c267f748c3"}, - {file = "frozenlist-1.5.0.tar.gz", hash = "sha256:81d5af29e61b9c8348e876d442253723928dce6433e0e76cd925cd83f1b4b817"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, + {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, + {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, + {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, + {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, + {file 
= "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, + {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, + {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"}, + 
{file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"}, + {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"}, + {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"}, + {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"}, + {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"}, + {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, + {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, ] [[package]] @@ -1245,14 +1215,14 @@ requests = ["requests (>=2.18.0,<3.0.0dev)"] [[package]] name = "googleapis-common-protos" -version = "1.66.0" +version = "1.65.0" description = "Common protobufs used in Google APIs" optional = false python-versions = ">=3.7" groups = ["main"] files = [ - {file = "googleapis_common_protos-1.66.0-py2.py3-none-any.whl", hash = "sha256:d7abcd75fabb2e0ec9f74466401f6c119a0b498e27370e9be4c94cb7e382b8ed"}, - {file = "googleapis_common_protos-1.66.0.tar.gz", hash = 
"sha256:c3e7b33d15fdca5374cc0a7346dd92ffa847425cc4ea941d970f13680052ec8c"}, + {file = "googleapis_common_protos-1.65.0-py2.py3-none-any.whl", hash = "sha256:2972e6c496f435b92590fd54045060867f3fe9be2c82ab148fc8885035479a63"}, + {file = "googleapis_common_protos-1.65.0.tar.gz", hash = "sha256:334a29d07cddc3aa01dee4988f9afd9b2916ee2ff49d6b757155dc0d197852c0"}, ] [package.dependencies] @@ -1278,18 +1248,18 @@ requests = ">=2.20.0,<3.0" [[package]] name = "gotrue" -version = "2.11.0" +version = "2.11.1" description = "Python Client Library for Supabase Auth" optional = false python-versions = "<4.0,>=3.9" groups = ["main"] files = [ - {file = "gotrue-2.11.0-py3-none-any.whl", hash = "sha256:62177ffd567448b352121bc7e9244ff018d59bb746dad476b51658f856d59cf8"}, - {file = "gotrue-2.11.0.tar.gz", hash = "sha256:a0a452748ef741337820c97b934327c25f796e7cd33c0bf4341346bcc5a837f5"}, + {file = "gotrue-2.11.1-py3-none-any.whl", hash = "sha256:1b2d915bdc65fd0ad608532759ce9c72fa2e910145c1e6901f2188519e7bcd2d"}, + {file = "gotrue-2.11.1.tar.gz", hash = "sha256:5594ceee60bd873e5f4fdd028b08dece3906f6013b6ed08e7786b71c0092fed0"}, ] [package.dependencies] -httpx = {version = ">=0.26,<0.28", extras = ["http2"]} +httpx = {version = ">=0.26,<0.29", extras = ["http2"]} pydantic = ">=1.10,<3" [[package]] @@ -1419,87 +1389,87 @@ protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4 [[package]] name = "grpcio" -version = "1.68.1" +version = "1.68.0" description = "HTTP/2-based RPC framework" optional = false python-versions = ">=3.8" groups = ["main"] files = [ - {file = "grpcio-1.68.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:d35740e3f45f60f3c37b1e6f2f4702c23867b9ce21c6410254c9c682237da68d"}, - {file = "grpcio-1.68.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:d99abcd61760ebb34bdff37e5a3ba333c5cc09feda8c1ad42547bea0416ada78"}, - {file = "grpcio-1.68.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:f8261fa2a5f679abeb2a0a93ad056d765cdca1c47745eda3f2d87f874ff4b8c9"}, - {file = "grpcio-1.68.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0feb02205a27caca128627bd1df4ee7212db051019a9afa76f4bb6a1a80ca95e"}, - {file = "grpcio-1.68.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:919d7f18f63bcad3a0f81146188e90274fde800a94e35d42ffe9eadf6a9a6330"}, - {file = "grpcio-1.68.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:963cc8d7d79b12c56008aabd8b457f400952dbea8997dd185f155e2f228db079"}, - {file = "grpcio-1.68.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ccf2ebd2de2d6661e2520dae293298a3803a98ebfc099275f113ce1f6c2a80f1"}, - {file = "grpcio-1.68.1-cp310-cp310-win32.whl", hash = "sha256:2cc1fd04af8399971bcd4f43bd98c22d01029ea2e56e69c34daf2bf8470e47f5"}, - {file = "grpcio-1.68.1-cp310-cp310-win_amd64.whl", hash = "sha256:ee2e743e51cb964b4975de572aa8fb95b633f496f9fcb5e257893df3be854746"}, - {file = "grpcio-1.68.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:55857c71641064f01ff0541a1776bfe04a59db5558e82897d35a7793e525774c"}, - {file = "grpcio-1.68.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4b177f5547f1b995826ef529d2eef89cca2f830dd8b2c99ffd5fde4da734ba73"}, - {file = "grpcio-1.68.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:3522c77d7e6606d6665ec8d50e867f13f946a4e00c7df46768f1c85089eae515"}, - {file = "grpcio-1.68.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9d1fae6bbf0816415b81db1e82fb3bf56f7857273c84dcbe68cbe046e58e1ccd"}, - {file = 
"grpcio-1.68.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:298ee7f80e26f9483f0b6f94cc0a046caf54400a11b644713bb5b3d8eb387600"}, - {file = "grpcio-1.68.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cbb5780e2e740b6b4f2d208e90453591036ff80c02cc605fea1af8e6fc6b1bbe"}, - {file = "grpcio-1.68.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ddda1aa22495d8acd9dfbafff2866438d12faec4d024ebc2e656784d96328ad0"}, - {file = "grpcio-1.68.1-cp311-cp311-win32.whl", hash = "sha256:b33bd114fa5a83f03ec6b7b262ef9f5cac549d4126f1dc702078767b10c46ed9"}, - {file = "grpcio-1.68.1-cp311-cp311-win_amd64.whl", hash = "sha256:7f20ebec257af55694d8f993e162ddf0d36bd82d4e57f74b31c67b3c6d63d8b2"}, - {file = "grpcio-1.68.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:8829924fffb25386995a31998ccbbeaa7367223e647e0122043dfc485a87c666"}, - {file = "grpcio-1.68.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3aed6544e4d523cd6b3119b0916cef3d15ef2da51e088211e4d1eb91a6c7f4f1"}, - {file = "grpcio-1.68.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:4efac5481c696d5cb124ff1c119a78bddbfdd13fc499e3bc0ca81e95fc573684"}, - {file = "grpcio-1.68.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ab2d912ca39c51f46baf2a0d92aa265aa96b2443266fc50d234fa88bf877d8e"}, - {file = "grpcio-1.68.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95c87ce2a97434dffe7327a4071839ab8e8bffd0054cc74cbe971fba98aedd60"}, - {file = "grpcio-1.68.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e4842e4872ae4ae0f5497bf60a0498fa778c192cc7a9e87877abd2814aca9475"}, - {file = "grpcio-1.68.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:255b1635b0ed81e9f91da4fcc8d43b7ea5520090b9a9ad9340d147066d1d3613"}, - {file = "grpcio-1.68.1-cp312-cp312-win32.whl", hash = "sha256:7dfc914cc31c906297b30463dde0b9be48e36939575eaf2a0a22a8096e69afe5"}, - {file = "grpcio-1.68.1-cp312-cp312-win_amd64.whl", hash = "sha256:a0c8ddabef9c8f41617f213e527254c41e8b96ea9d387c632af878d05db9229c"}, - {file = "grpcio-1.68.1-cp313-cp313-linux_armv7l.whl", hash = "sha256:a47faedc9ea2e7a3b6569795c040aae5895a19dde0c728a48d3c5d7995fda385"}, - {file = "grpcio-1.68.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:390eee4225a661c5cd133c09f5da1ee3c84498dc265fd292a6912b65c421c78c"}, - {file = "grpcio-1.68.1-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:66a24f3d45c33550703f0abb8b656515b0ab777970fa275693a2f6dc8e35f1c1"}, - {file = "grpcio-1.68.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c08079b4934b0bf0a8847f42c197b1d12cba6495a3d43febd7e99ecd1cdc8d54"}, - {file = "grpcio-1.68.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8720c25cd9ac25dd04ee02b69256d0ce35bf8a0f29e20577427355272230965a"}, - {file = "grpcio-1.68.1-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:04cfd68bf4f38f5bb959ee2361a7546916bd9a50f78617a346b3aeb2b42e2161"}, - {file = "grpcio-1.68.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c28848761a6520c5c6071d2904a18d339a796ebe6b800adc8b3f474c5ce3c3ad"}, - {file = "grpcio-1.68.1-cp313-cp313-win32.whl", hash = "sha256:77d65165fc35cff6e954e7fd4229e05ec76102d4406d4576528d3a3635fc6172"}, - {file = "grpcio-1.68.1-cp313-cp313-win_amd64.whl", hash = "sha256:a8040f85dcb9830d8bbb033ae66d272614cec6faceee88d37a88a9bd1a7a704e"}, - {file = "grpcio-1.68.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:eeb38ff04ab6e5756a2aef6ad8d94e89bb4a51ef96e20f45c44ba190fa0bcaad"}, - {file = 
"grpcio-1.68.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8a3869a6661ec8f81d93f4597da50336718bde9eb13267a699ac7e0a1d6d0bea"}, - {file = "grpcio-1.68.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:2c4cec6177bf325eb6faa6bd834d2ff6aa8bb3b29012cceb4937b86f8b74323c"}, - {file = "grpcio-1.68.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12941d533f3cd45d46f202e3667be8ebf6bcb3573629c7ec12c3e211d99cfccf"}, - {file = "grpcio-1.68.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80af6f1e69c5e68a2be529990684abdd31ed6622e988bf18850075c81bb1ad6e"}, - {file = "grpcio-1.68.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e8dbe3e00771bfe3d04feed8210fc6617006d06d9a2679b74605b9fed3e8362c"}, - {file = "grpcio-1.68.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:83bbf5807dc3ee94ce1de2dfe8a356e1d74101e4b9d7aa8c720cc4818a34aded"}, - {file = "grpcio-1.68.1-cp38-cp38-win32.whl", hash = "sha256:8cb620037a2fd9eeee97b4531880e439ebfcd6d7d78f2e7dcc3726428ab5ef63"}, - {file = "grpcio-1.68.1-cp38-cp38-win_amd64.whl", hash = "sha256:52fbf85aa71263380d330f4fce9f013c0798242e31ede05fcee7fbe40ccfc20d"}, - {file = "grpcio-1.68.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:cb400138e73969eb5e0535d1d06cae6a6f7a15f2cc74add320e2130b8179211a"}, - {file = "grpcio-1.68.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a1b988b40f2fd9de5c820f3a701a43339d8dcf2cb2f1ca137e2c02671cc83ac1"}, - {file = "grpcio-1.68.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:96f473cdacfdd506008a5d7579c9f6a7ff245a9ade92c3c0265eb76cc591914f"}, - {file = "grpcio-1.68.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:37ea3be171f3cf3e7b7e412a98b77685eba9d4fd67421f4a34686a63a65d99f9"}, - {file = "grpcio-1.68.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ceb56c4285754e33bb3c2fa777d055e96e6932351a3082ce3559be47f8024f0"}, - {file = "grpcio-1.68.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:dffd29a2961f3263a16d73945b57cd44a8fd0b235740cb14056f0612329b345e"}, - {file = "grpcio-1.68.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:025f790c056815b3bf53da850dd70ebb849fd755a4b1ac822cb65cd631e37d43"}, - {file = "grpcio-1.68.1-cp39-cp39-win32.whl", hash = "sha256:1098f03dedc3b9810810568060dea4ac0822b4062f537b0f53aa015269be0a76"}, - {file = "grpcio-1.68.1-cp39-cp39-win_amd64.whl", hash = "sha256:334ab917792904245a028f10e803fcd5b6f36a7b2173a820c0b5b076555825e1"}, - {file = "grpcio-1.68.1.tar.gz", hash = "sha256:44a8502dd5de653ae6a73e2de50a401d84184f0331d0ac3daeb044e66d5c5054"}, + {file = "grpcio-1.68.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:619b5d0f29f4f5351440e9343224c3e19912c21aeda44e0c49d0d147a8d01544"}, + {file = "grpcio-1.68.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:a59f5822f9459bed098ffbceb2713abbf7c6fd13f2b9243461da5c338d0cd6c3"}, + {file = "grpcio-1.68.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:c03d89df516128febc5a7e760d675b478ba25802447624edf7aa13b1e7b11e2a"}, + {file = "grpcio-1.68.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44bcbebb24363d587472089b89e2ea0ab2e2b4df0e4856ba4c0b087c82412121"}, + {file = "grpcio-1.68.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79f81b7fbfb136247b70465bd836fa1733043fdee539cd6031cb499e9608a110"}, + {file = "grpcio-1.68.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:88fb2925789cfe6daa20900260ef0a1d0a61283dfb2d2fffe6194396a354c618"}, + {file = 
"grpcio-1.68.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:99f06232b5c9138593ae6f2e355054318717d32a9c09cdc5a2885540835067a1"}, + {file = "grpcio-1.68.0-cp310-cp310-win32.whl", hash = "sha256:a6213d2f7a22c3c30a479fb5e249b6b7e648e17f364598ff64d08a5136fe488b"}, + {file = "grpcio-1.68.0-cp310-cp310-win_amd64.whl", hash = "sha256:15327ab81131ef9b94cb9f45b5bd98803a179c7c61205c8c0ac9aff9d6c4e82a"}, + {file = "grpcio-1.68.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:3b2b559beb2d433129441783e5f42e3be40a9e1a89ec906efabf26591c5cd415"}, + {file = "grpcio-1.68.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e46541de8425a4d6829ac6c5d9b16c03c292105fe9ebf78cb1c31e8d242f9155"}, + {file = "grpcio-1.68.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:c1245651f3c9ea92a2db4f95d37b7597db6b246d5892bca6ee8c0e90d76fb73c"}, + {file = "grpcio-1.68.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f1931c7aa85be0fa6cea6af388e576f3bf6baee9e5d481c586980c774debcb4"}, + {file = "grpcio-1.68.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b0ff09c81e3aded7a183bc6473639b46b6caa9c1901d6f5e2cba24b95e59e30"}, + {file = "grpcio-1.68.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8c73f9fbbaee1a132487e31585aa83987ddf626426d703ebcb9a528cf231c9b1"}, + {file = "grpcio-1.68.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6b2f98165ea2790ea159393a2246b56f580d24d7da0d0342c18a085299c40a75"}, + {file = "grpcio-1.68.0-cp311-cp311-win32.whl", hash = "sha256:e1e7ed311afb351ff0d0e583a66fcb39675be112d61e7cfd6c8269884a98afbc"}, + {file = "grpcio-1.68.0-cp311-cp311-win_amd64.whl", hash = "sha256:e0d2f68eaa0a755edd9a47d40e50dba6df2bceda66960dee1218da81a2834d27"}, + {file = "grpcio-1.68.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:8af6137cc4ae8e421690d276e7627cfc726d4293f6607acf9ea7260bd8fc3d7d"}, + {file = "grpcio-1.68.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4028b8e9a3bff6f377698587d642e24bd221810c06579a18420a17688e421af7"}, + {file = "grpcio-1.68.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:f60fa2adf281fd73ae3a50677572521edca34ba373a45b457b5ebe87c2d01e1d"}, + {file = "grpcio-1.68.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e18589e747c1e70b60fab6767ff99b2d0c359ea1db8a2cb524477f93cdbedf5b"}, + {file = "grpcio-1.68.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0d30f3fee9372796f54d3100b31ee70972eaadcc87314be369360248a3dcffe"}, + {file = "grpcio-1.68.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:7e0a3e72c0e9a1acab77bef14a73a416630b7fd2cbd893c0a873edc47c42c8cd"}, + {file = "grpcio-1.68.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a831dcc343440969aaa812004685ed322cdb526cd197112d0db303b0da1e8659"}, + {file = "grpcio-1.68.0-cp312-cp312-win32.whl", hash = "sha256:5a180328e92b9a0050958ced34dddcb86fec5a8b332f5a229e353dafc16cd332"}, + {file = "grpcio-1.68.0-cp312-cp312-win_amd64.whl", hash = "sha256:2bddd04a790b69f7a7385f6a112f46ea0b34c4746f361ebafe9ca0be567c78e9"}, + {file = "grpcio-1.68.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:fc05759ffbd7875e0ff2bd877be1438dfe97c9312bbc558c8284a9afa1d0f40e"}, + {file = "grpcio-1.68.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:15fa1fe25d365a13bc6d52fcac0e3ee1f9baebdde2c9b3b2425f8a4979fccea1"}, + {file = "grpcio-1.68.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:32a9cb4686eb2e89d97022ecb9e1606d132f85c444354c17a7dbde4a455e4a3b"}, + {file = 
"grpcio-1.68.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dba037ff8d284c8e7ea9a510c8ae0f5b016004f13c3648f72411c464b67ff2fb"}, + {file = "grpcio-1.68.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0efbbd849867e0e569af09e165363ade75cf84f5229b2698d53cf22c7a4f9e21"}, + {file = "grpcio-1.68.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:4e300e6978df0b65cc2d100c54e097c10dfc7018b9bd890bbbf08022d47f766d"}, + {file = "grpcio-1.68.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:6f9c7ad1a23e1047f827385f4713b5b8c6c7d325705be1dd3e31fb00dcb2f665"}, + {file = "grpcio-1.68.0-cp313-cp313-win32.whl", hash = "sha256:3ac7f10850fd0487fcce169c3c55509101c3bde2a3b454869639df2176b60a03"}, + {file = "grpcio-1.68.0-cp313-cp313-win_amd64.whl", hash = "sha256:afbf45a62ba85a720491bfe9b2642f8761ff348006f5ef67e4622621f116b04a"}, + {file = "grpcio-1.68.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:f8f695d9576ce836eab27ba7401c60acaf9ef6cf2f70dfe5462055ba3df02cc3"}, + {file = "grpcio-1.68.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9fe1b141cda52f2ca73e17d2d3c6a9f3f3a0c255c216b50ce616e9dca7e3441d"}, + {file = "grpcio-1.68.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:4df81d78fd1646bf94ced4fb4cd0a7fe2e91608089c522ef17bc7db26e64effd"}, + {file = "grpcio-1.68.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:46a2d74d4dd8993151c6cd585594c082abe74112c8e4175ddda4106f2ceb022f"}, + {file = "grpcio-1.68.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a17278d977746472698460c63abf333e1d806bd41f2224f90dbe9460101c9796"}, + {file = "grpcio-1.68.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:15377bce516b1c861c35e18eaa1c280692bf563264836cece693c0f169b48829"}, + {file = "grpcio-1.68.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cc5f0a4f5904b8c25729a0498886b797feb817d1fd3812554ffa39551112c161"}, + {file = "grpcio-1.68.0-cp38-cp38-win32.whl", hash = "sha256:def1a60a111d24376e4b753db39705adbe9483ef4ca4761f825639d884d5da78"}, + {file = "grpcio-1.68.0-cp38-cp38-win_amd64.whl", hash = "sha256:55d3b52fd41ec5772a953612db4e70ae741a6d6ed640c4c89a64f017a1ac02b5"}, + {file = "grpcio-1.68.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:0d230852ba97654453d290e98d6aa61cb48fa5fafb474fb4c4298d8721809354"}, + {file = "grpcio-1.68.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:50992f214264e207e07222703c17d9cfdcc2c46ed5a1ea86843d440148ebbe10"}, + {file = "grpcio-1.68.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:14331e5c27ed3545360464a139ed279aa09db088f6e9502e95ad4bfa852bb116"}, + {file = "grpcio-1.68.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f84890b205692ea813653ece4ac9afa2139eae136e419231b0eec7c39fdbe4c2"}, + {file = "grpcio-1.68.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0cf343c6f4f6aa44863e13ec9ddfe299e0be68f87d68e777328bff785897b05"}, + {file = "grpcio-1.68.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:fd2c2d47969daa0e27eadaf15c13b5e92605c5e5953d23c06d0b5239a2f176d3"}, + {file = "grpcio-1.68.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:18668e36e7f4045820f069997834e94e8275910b1f03e078a6020bd464cb2363"}, + {file = "grpcio-1.68.0-cp39-cp39-win32.whl", hash = "sha256:2af76ab7c427aaa26aa9187c3e3c42f38d3771f91a20f99657d992afada2294a"}, + {file = "grpcio-1.68.0-cp39-cp39-win_amd64.whl", hash = "sha256:e694b5928b7b33ca2d3b4d5f9bf8b5888906f181daff6b406f4938f3a997a490"}, + {file = "grpcio-1.68.0.tar.gz", hash = 
"sha256:7e7483d39b4a4fddb9906671e9ea21aaad4f031cdfc349fec76bdfa1e404543a"}, ] [package.extras] -protobuf = ["grpcio-tools (>=1.68.1)"] +protobuf = ["grpcio-tools (>=1.68.0)"] [[package]] name = "grpcio-status" -version = "1.68.1" +version = "1.68.0" description = "Status proto mapping for gRPC" optional = false python-versions = ">=3.8" groups = ["main"] files = [ - {file = "grpcio_status-1.68.1-py3-none-any.whl", hash = "sha256:66f3d8847f665acfd56221333d66f7ad8927903d87242a482996bdb45e8d28fd"}, - {file = "grpcio_status-1.68.1.tar.gz", hash = "sha256:e1378d036c81a1610d7b4c7a146cd663dd13fcc915cf4d7d053929dba5bbb6e1"}, + {file = "grpcio_status-1.68.0-py3-none-any.whl", hash = "sha256:0a71b15d989f02df803b4ba85c5bf1f43aeaa58ac021e5f9974b8cadc41f784d"}, + {file = "grpcio_status-1.68.0.tar.gz", hash = "sha256:8369823de22ab6a2cddb3804669c149ae7a71819e127c2dca7c2322028d52bea"}, ] [package.dependencies] googleapis-common-protos = ">=1.5.5" -grpcio = ">=1.68.1" +grpcio = ">=1.68.0" protobuf = ">=5.26.1,<6.0dev" [[package]] @@ -1544,14 +1514,14 @@ files = [ [[package]] name = "httpcore" -version = "1.0.7" +version = "1.0.5" description = "A minimal low-level HTTP client." optional = false python-versions = ">=3.8" groups = ["main", "dev"] files = [ - {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, - {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, + {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, + {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, ] [package.dependencies] @@ -1562,7 +1532,7 @@ h11 = ">=0.13,<0.15" asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<1.0)"] +trio = ["trio (>=0.22.0,<0.26.0)"] [[package]] name = "httplib2" @@ -1880,14 +1850,14 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- [[package]] name = "jsonschema-specifications" -version = "2024.10.1" +version = "2023.12.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" optional = false -python-versions = ">=3.9" +python-versions = ">=3.8" groups = ["main"] files = [ - {file = "jsonschema_specifications-2024.10.1-py3-none-any.whl", hash = "sha256:a09a0680616357d9a0ecf05c12ad234479f549239d0f5b55f3deea67475da9bf"}, - {file = "jsonschema_specifications-2024.10.1.tar.gz", hash = "sha256:0f38b83639958ce1152d02a7f062902c41c8fd20d558b0c34344292d417ae272"}, + {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"}, + {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"}, ] [package.dependencies] @@ -1936,73 +1906,72 @@ test-filesource = ["pyyaml (>=5.3.1)", "watchdog (>=3.0.0)"] [[package]] name = "markupsafe" -version = "3.0.2" +version = "2.1.5" description = "Safely add untrusted strings to HTML/XML markup." 
optional = false -python-versions = ">=3.9" +python-versions = ">=3.7" groups = ["main"] files = [ - {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, - {file = 
"MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, - {file = 
"MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, - {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = 
"MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = 
"MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] [[package]] @@ -2222,14 +2191,14 @@ importlib-metadata = ">=6.0,<=8.5.0" [[package]] name = "packaging" -version = "24.2" +version = "24.1" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" groups = ["main", "dev"] files = [ - {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, - {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, ] [[package]] @@ -2361,14 +2330,14 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "poethepoet" -version = "0.31.1" +version = "0.31.0" description = "A task runner that works well with poetry." optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "poethepoet-0.31.1-py3-none-any.whl", hash = "sha256:7fdfa0ac6074be9936723e7231b5bfaad2923e96c674a9857e81d326cf8ccdc2"}, - {file = "poethepoet-0.31.1.tar.gz", hash = "sha256:d6b66074edf85daf115bb916eae0afd6387d19e1562e1c9ef7d61d5c585696aa"}, + {file = "poethepoet-0.31.0-py3-none-any.whl", hash = "sha256:5067c5adf9f228b8af1f3df7d57dc319ed8b3f153bf21faf99f7b74494174c3d"}, + {file = "poethepoet-0.31.0.tar.gz", hash = "sha256:b1cffb120149101b02ffa0583c6e61dfee53953a741df3dabf179836bdef97f5"}, ] [package.dependencies] @@ -2381,19 +2350,19 @@ poetry-plugin = ["poetry (>=1.0,<2.0)"] [[package]] name = "postgrest" -version = "0.18.0" +version = "0.19.1" description = "PostgREST client for Python. This library provides an ORM interface to PostgREST." 
optional = false python-versions = "<4.0,>=3.9" groups = ["main"] files = [ - {file = "postgrest-0.18.0-py3-none-any.whl", hash = "sha256:200baad0d23fee986b3a0ffd3e07bfe0cdd40e09760f11e8e13a6c0c2376d5fa"}, - {file = "postgrest-0.18.0.tar.gz", hash = "sha256:29c1a94801a17eb9ad590189993fe5a7a6d8c1bfc11a3c9d0ce7ba146454ebb3"}, + {file = "postgrest-0.19.1-py3-none-any.whl", hash = "sha256:a8e7be4e1abc69fd8eee5a49d7dc3a76dfbffbd778beed0b2bd7accb3f4f3a2a"}, + {file = "postgrest-0.19.1.tar.gz", hash = "sha256:d8fa88953cced4f45efa0f412056c364f64ece8a35b5b35f458a7e58c133fbca"}, ] [package.dependencies] deprecation = ">=2.1.0,<3.0.0" -httpx = {version = ">=0.26,<0.28", extras = ["http2"]} +httpx = {version = ">=0.26,<0.29", extras = ["http2"]} pydantic = ">=1.9,<3.0" strenum = {version = ">=0.4.9,<0.5.0", markers = "python_version < \"3.11\""} @@ -2581,23 +2550,23 @@ testing = ["google-api-core (>=1.31.5)"] [[package]] name = "protobuf" -version = "5.29.1" +version = "5.28.2" description = "" optional = false python-versions = ">=3.8" groups = ["main"] files = [ - {file = "protobuf-5.29.1-cp310-abi3-win32.whl", hash = "sha256:22c1f539024241ee545cbcb00ee160ad1877975690b16656ff87dde107b5f110"}, - {file = "protobuf-5.29.1-cp310-abi3-win_amd64.whl", hash = "sha256:1fc55267f086dd4050d18ef839d7bd69300d0d08c2a53ca7df3920cc271a3c34"}, - {file = "protobuf-5.29.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:d473655e29c0c4bbf8b69e9a8fb54645bc289dead6d753b952e7aa660254ae18"}, - {file = "protobuf-5.29.1-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:b5ba1d0e4c8a40ae0496d0e2ecfdbb82e1776928a205106d14ad6985a09ec155"}, - {file = "protobuf-5.29.1-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:8ee1461b3af56145aca2800e6a3e2f928108c749ba8feccc6f5dd0062c410c0d"}, - {file = "protobuf-5.29.1-cp38-cp38-win32.whl", hash = "sha256:50879eb0eb1246e3a5eabbbe566b44b10348939b7cc1b267567e8c3d07213853"}, - {file = "protobuf-5.29.1-cp38-cp38-win_amd64.whl", hash = "sha256:027fbcc48cea65a6b17028510fdd054147057fa78f4772eb547b9274e5219331"}, - {file = "protobuf-5.29.1-cp39-cp39-win32.whl", hash = "sha256:5a41deccfa5e745cef5c65a560c76ec0ed8e70908a67cc8f4da5fce588b50d57"}, - {file = "protobuf-5.29.1-cp39-cp39-win_amd64.whl", hash = "sha256:012ce28d862ff417fd629285aca5d9772807f15ceb1a0dbd15b88f58c776c98c"}, - {file = "protobuf-5.29.1-py3-none-any.whl", hash = "sha256:32600ddb9c2a53dedc25b8581ea0f1fd8ea04956373c0c07577ce58d312522e0"}, - {file = "protobuf-5.29.1.tar.gz", hash = "sha256:683be02ca21a6ffe80db6dd02c0b5b2892322c59ca57fd6c872d652cb80549cb"}, + {file = "protobuf-5.28.2-cp310-abi3-win32.whl", hash = "sha256:eeea10f3dc0ac7e6b4933d32db20662902b4ab81bf28df12218aa389e9c2102d"}, + {file = "protobuf-5.28.2-cp310-abi3-win_amd64.whl", hash = "sha256:2c69461a7fcc8e24be697624c09a839976d82ae75062b11a0972e41fd2cd9132"}, + {file = "protobuf-5.28.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a8b9403fc70764b08d2f593ce44f1d2920c5077bf7d311fefec999f8c40f78b7"}, + {file = "protobuf-5.28.2-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:35cfcb15f213449af7ff6198d6eb5f739c37d7e4f1c09b5d0641babf2cc0c68f"}, + {file = "protobuf-5.28.2-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:5e8a95246d581eef20471b5d5ba010d55f66740942b95ba9b872d918c459452f"}, + {file = "protobuf-5.28.2-cp38-cp38-win32.whl", hash = "sha256:87317e9bcda04a32f2ee82089a204d3a2f0d3c8aeed16568c7daf4756e4f1fe0"}, + {file = "protobuf-5.28.2-cp38-cp38-win_amd64.whl", hash = "sha256:c0ea0123dac3399a2eeb1a1443d82b7afc9ff40241433296769f7da42d142ec3"}, + 
{file = "protobuf-5.28.2-cp39-cp39-win32.whl", hash = "sha256:ca53faf29896c526863366a52a8f4d88e69cd04ec9571ed6082fa117fac3ab36"}, + {file = "protobuf-5.28.2-cp39-cp39-win_amd64.whl", hash = "sha256:8ddc60bf374785fb7cb12510b267f59067fa10087325b8e1855b898a0d81d276"}, + {file = "protobuf-5.28.2-py3-none-any.whl", hash = "sha256:52235802093bd8a2811abbe8bf0ab9c5f54cca0a751fdd3f6ac2a21438bffece"}, + {file = "protobuf-5.28.2.tar.gz", hash = "sha256:59379674ff119717404f7454647913787034f03fe7049cbef1d74a97bb4593f0"}, ] [[package]] @@ -2948,14 +2917,14 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] name = "pyparsing" -version = "3.2.0" +version = "3.1.4" description = "pyparsing module - Classes and methods to define and execute parsing grammars" optional = false -python-versions = ">=3.9" +python-versions = ">=3.6.8" groups = ["main"] files = [ - {file = "pyparsing-3.2.0-py3-none-any.whl", hash = "sha256:93d9577b88da0bbea8cc8334ee8b918ed014968fd2ec383e868fb8afb1ccef84"}, - {file = "pyparsing-3.2.0.tar.gz", hash = "sha256:cbf74e27246d595d9a74b186b810f6fbb86726dbf3b9532efb343f6d7294fe9c"}, + {file = "pyparsing-3.1.4-py3-none-any.whl", hash = "sha256:a6a7ee4235a3f944aa1fa2249307708f893fe5717dc603503c6c7969c070fb7c"}, + {file = "pyparsing-3.1.4.tar.gz", hash = "sha256:f86ec8d1a83f11977c9a6ea7598e8c27fc5cddfa5b07ea2241edbbde1d7bc032"}, ] [package.extras] @@ -2975,14 +2944,14 @@ files = [ [[package]] name = "pyright" -version = "1.1.390" +version = "1.1.389" description = "Command line wrapper for pyright" optional = false python-versions = ">=3.7" groups = ["dev"] files = [ - {file = "pyright-1.1.390-py3-none-any.whl", hash = "sha256:ecebfba5b6b50af7c1a44c2ba144ba2ab542c227eb49bc1f16984ff714e0e110"}, - {file = "pyright-1.1.390.tar.gz", hash = "sha256:aad7f160c49e0fbf8209507a15e17b781f63a86a1facb69ca877c71ef2e9538d"}, + {file = "pyright-1.1.389-py3-none-any.whl", hash = "sha256:41e9620bba9254406dc1f621a88ceab5a88af4c826feb4f614d95691ed243a60"}, + {file = "pyright-1.1.389.tar.gz", hash = "sha256:716bf8cc174ab8b4dcf6828c3298cac05c5ed775dda9910106a5dcfe4c7fe220"}, ] [package.dependencies] @@ -3011,14 +2980,14 @@ serpent = ">=1.41" [[package]] name = "pytest" -version = "8.3.4" +version = "8.3.3" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" groups = ["main", "dev"] files = [ - {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"}, - {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"}, + {file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"}, + {file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"}, ] [package.dependencies] @@ -3192,32 +3161,32 @@ files = [ [[package]] name = "realtime" -version = "2.0.6" +version = "2.0.5" description = "" optional = false python-versions = "<4.0,>=3.9" groups = ["main"] files = [ - {file = "realtime-2.0.6-py3-none-any.whl", hash = "sha256:9aab6009c11883197386a0a9dc8c2b6939e62dddda734cfb77594727ac9ae0ce"}, - {file = "realtime-2.0.6.tar.gz", hash = "sha256:ced37686a77a546571029ecc74cfb31fff1404a5159d1198fa882af545843a6f"}, + {file = "realtime-2.0.5-py3-none-any.whl", hash = "sha256:f9ec2d762794709e37a8e2745c8dfd86eac4870678808f09676c8f2b7bfa6bbc"}, + {file = "realtime-2.0.5.tar.gz", hash = 
"sha256:133828fbc2cc2325fb015fe071c6da9fb488819cac96d85ed297045c715b35f5"}, ] [package.dependencies] -aiohttp = ">=3.10.10,<4.0.0" +aiohttp = ">=3.10.6,<4.0.0" python-dateutil = ">=2.8.1,<3.0.0" typing-extensions = ">=4.12.2,<5.0.0" websockets = ">=11,<14" [[package]] name = "redis" -version = "5.2.1" +version = "5.2.0" description = "Python client for Redis database and key-value store" optional = false python-versions = ">=3.8" groups = ["main"] files = [ - {file = "redis-5.2.1-py3-none-any.whl", hash = "sha256:ee7e1056b9aea0f04c6c2ed59452947f34c4940ee025f5dd83e6a6418b6989e4"}, - {file = "redis-5.2.1.tar.gz", hash = "sha256:16f2e22dff21d5125e8481515e386711a34cbec50f0e44413dd7d9c060a54e0f"}, + {file = "redis-5.2.0-py3-none-any.whl", hash = "sha256:ae174f2bb3b1bf2b09d54bf3e51fbc1469cf6c10aa03e21141f51969801a7897"}, + {file = "redis-5.2.0.tar.gz", hash = "sha256:0b1087665a771b1ff2e003aa5bdd354f15a70c9e25d5a7dbf9c722c16528a7b0"}, ] [package.dependencies] @@ -3285,14 +3254,14 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-oauthlib" -version = "2.0.0" +version = "1.3.1" description = "OAuthlib authentication support for Requests." optional = false -python-versions = ">=3.4" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" groups = ["main"] files = [ - {file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"}, - {file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"}, + {file = "requests-oauthlib-1.3.1.tar.gz", hash = "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"}, + {file = "requests_oauthlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5"}, ] [package.dependencies] @@ -3304,115 +3273,115 @@ rsa = ["oauthlib[signedtoken] (>=3.0.0)"] [[package]] name = "rpds-py" -version = "0.22.3" +version = "0.20.0" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false -python-versions = ">=3.9" +python-versions = ">=3.8" groups = ["main"] files = [ - {file = "rpds_py-0.22.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:6c7b99ca52c2c1752b544e310101b98a659b720b21db00e65edca34483259967"}, - {file = "rpds_py-0.22.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:be2eb3f2495ba669d2a985f9b426c1797b7d48d6963899276d22f23e33d47e37"}, - {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70eb60b3ae9245ddea20f8a4190bd79c705a22f8028aaf8bbdebe4716c3fab24"}, - {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4041711832360a9b75cfb11b25a6a97c8fb49c07b8bd43d0d02b45d0b499a4ff"}, - {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:64607d4cbf1b7e3c3c8a14948b99345eda0e161b852e122c6bb71aab6d1d798c"}, - {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e69b0a0e2537f26d73b4e43ad7bc8c8efb39621639b4434b76a3de50c6966e"}, - {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc27863442d388870c1809a87507727b799c8460573cfbb6dc0eeaef5a11b5ec"}, - {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e79dd39f1e8c3504be0607e5fc6e86bb60fe3584bec8b782578c3b0fde8d932c"}, - {file = 
"rpds_py-0.22.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e0fa2d4ec53dc51cf7d3bb22e0aa0143966119f42a0c3e4998293a3dd2856b09"}, - {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fda7cb070f442bf80b642cd56483b5548e43d366fe3f39b98e67cce780cded00"}, - {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cff63a0272fcd259dcc3be1657b07c929c466b067ceb1c20060e8d10af56f5bf"}, - {file = "rpds_py-0.22.3-cp310-cp310-win32.whl", hash = "sha256:9bd7228827ec7bb817089e2eb301d907c0d9827a9e558f22f762bb690b131652"}, - {file = "rpds_py-0.22.3-cp310-cp310-win_amd64.whl", hash = "sha256:9beeb01d8c190d7581a4d59522cd3d4b6887040dcfc744af99aa59fef3e041a8"}, - {file = "rpds_py-0.22.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d20cfb4e099748ea39e6f7b16c91ab057989712d31761d3300d43134e26e165f"}, - {file = "rpds_py-0.22.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:68049202f67380ff9aa52f12e92b1c30115f32e6895cd7198fa2a7961621fc5a"}, - {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb4f868f712b2dd4bcc538b0a0c1f63a2b1d584c925e69a224d759e7070a12d5"}, - {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bc51abd01f08117283c5ebf64844a35144a0843ff7b2983e0648e4d3d9f10dbb"}, - {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f3cec041684de9a4684b1572fe28c7267410e02450f4561700ca5a3bc6695a2"}, - {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7ef9d9da710be50ff6809fed8f1963fecdfecc8b86656cadfca3bc24289414b0"}, - {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59f4a79c19232a5774aee369a0c296712ad0e77f24e62cad53160312b1c1eaa1"}, - {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1a60bce91f81ddaac922a40bbb571a12c1070cb20ebd6d49c48e0b101d87300d"}, - {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e89391e6d60251560f0a8f4bd32137b077a80d9b7dbe6d5cab1cd80d2746f648"}, - {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e3fb866d9932a3d7d0c82da76d816996d1667c44891bd861a0f97ba27e84fc74"}, - {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1352ae4f7c717ae8cba93421a63373e582d19d55d2ee2cbb184344c82d2ae55a"}, - {file = "rpds_py-0.22.3-cp311-cp311-win32.whl", hash = "sha256:b0b4136a252cadfa1adb705bb81524eee47d9f6aab4f2ee4fa1e9d3cd4581f64"}, - {file = "rpds_py-0.22.3-cp311-cp311-win_amd64.whl", hash = "sha256:8bd7c8cfc0b8247c8799080fbff54e0b9619e17cdfeb0478ba7295d43f635d7c"}, - {file = "rpds_py-0.22.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:27e98004595899949bd7a7b34e91fa7c44d7a97c40fcaf1d874168bb652ec67e"}, - {file = "rpds_py-0.22.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1978d0021e943aae58b9b0b196fb4895a25cc53d3956b8e35e0b7682eefb6d56"}, - {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:655ca44a831ecb238d124e0402d98f6212ac527a0ba6c55ca26f616604e60a45"}, - {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:feea821ee2a9273771bae61194004ee2fc33f8ec7db08117ef9147d4bbcbca8e"}, - {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22bebe05a9ffc70ebfa127efbc429bc26ec9e9b4ee4d15a740033efda515cf3d"}, - {file = 
"rpds_py-0.22.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3af6e48651c4e0d2d166dc1b033b7042ea3f871504b6805ba5f4fe31581d8d38"}, - {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67ba3c290821343c192f7eae1d8fd5999ca2dc99994114643e2f2d3e6138b15"}, - {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:02fbb9c288ae08bcb34fb41d516d5eeb0455ac35b5512d03181d755d80810059"}, - {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f56a6b404f74ab372da986d240e2e002769a7d7102cc73eb238a4f72eec5284e"}, - {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0a0461200769ab3b9ab7e513f6013b7a97fdeee41c29b9db343f3c5a8e2b9e61"}, - {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8633e471c6207a039eff6aa116e35f69f3156b3989ea3e2d755f7bc41754a4a7"}, - {file = "rpds_py-0.22.3-cp312-cp312-win32.whl", hash = "sha256:593eba61ba0c3baae5bc9be2f5232430453fb4432048de28399ca7376de9c627"}, - {file = "rpds_py-0.22.3-cp312-cp312-win_amd64.whl", hash = "sha256:d115bffdd417c6d806ea9069237a4ae02f513b778e3789a359bc5856e0404cc4"}, - {file = "rpds_py-0.22.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ea7433ce7e4bfc3a85654aeb6747babe3f66eaf9a1d0c1e7a4435bbdf27fea84"}, - {file = "rpds_py-0.22.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6dd9412824c4ce1aca56c47b0991e65bebb7ac3f4edccfd3f156150c96a7bf25"}, - {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20070c65396f7373f5df4005862fa162db5d25d56150bddd0b3e8214e8ef45b4"}, - {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0b09865a9abc0ddff4e50b5ef65467cd94176bf1e0004184eb915cbc10fc05c5"}, - {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3453e8d41fe5f17d1f8e9c383a7473cd46a63661628ec58e07777c2fff7196dc"}, - {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f5d36399a1b96e1a5fdc91e0522544580dbebeb1f77f27b2b0ab25559e103b8b"}, - {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:009de23c9c9ee54bf11303a966edf4d9087cd43a6003672e6aa7def643d06518"}, - {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1aef18820ef3e4587ebe8b3bc9ba6e55892a6d7b93bac6d29d9f631a3b4befbd"}, - {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f60bd8423be1d9d833f230fdbccf8f57af322d96bcad6599e5a771b151398eb2"}, - {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:62d9cfcf4948683a18a9aff0ab7e1474d407b7bab2ca03116109f8464698ab16"}, - {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9253fc214112405f0afa7db88739294295f0e08466987f1d70e29930262b4c8f"}, - {file = "rpds_py-0.22.3-cp313-cp313-win32.whl", hash = "sha256:fb0ba113b4983beac1a2eb16faffd76cb41e176bf58c4afe3e14b9c681f702de"}, - {file = "rpds_py-0.22.3-cp313-cp313-win_amd64.whl", hash = "sha256:c58e2339def52ef6b71b8f36d13c3688ea23fa093353f3a4fee2556e62086ec9"}, - {file = "rpds_py-0.22.3-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:f82a116a1d03628a8ace4859556fb39fd1424c933341a08ea3ed6de1edb0283b"}, - {file = "rpds_py-0.22.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3dfcbc95bd7992b16f3f7ba05af8a64ca694331bd24f9157b49dadeeb287493b"}, - {file = 
"rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59259dc58e57b10e7e18ce02c311804c10c5a793e6568f8af4dead03264584d1"}, - {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5725dd9cc02068996d4438d397e255dcb1df776b7ceea3b9cb972bdb11260a83"}, - {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99b37292234e61325e7a5bb9689e55e48c3f5f603af88b1642666277a81f1fbd"}, - {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:27b1d3b3915a99208fee9ab092b8184c420f2905b7d7feb4aeb5e4a9c509b8a1"}, - {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f612463ac081803f243ff13cccc648578e2279295048f2a8d5eb430af2bae6e3"}, - {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f73d3fef726b3243a811121de45193c0ca75f6407fe66f3f4e183c983573e130"}, - {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3f21f0495edea7fdbaaa87e633a8689cd285f8f4af5c869f27bc8074638ad69c"}, - {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:1e9663daaf7a63ceccbbb8e3808fe90415b0757e2abddbfc2e06c857bf8c5e2b"}, - {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a76e42402542b1fae59798fab64432b2d015ab9d0c8c47ba7addddbaf7952333"}, - {file = "rpds_py-0.22.3-cp313-cp313t-win32.whl", hash = "sha256:69803198097467ee7282750acb507fba35ca22cc3b85f16cf45fb01cb9097730"}, - {file = "rpds_py-0.22.3-cp313-cp313t-win_amd64.whl", hash = "sha256:f5cf2a0c2bdadf3791b5c205d55a37a54025c6e18a71c71f82bb536cf9a454bf"}, - {file = "rpds_py-0.22.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:378753b4a4de2a7b34063d6f95ae81bfa7b15f2c1a04a9518e8644e81807ebea"}, - {file = "rpds_py-0.22.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3445e07bf2e8ecfeef6ef67ac83de670358abf2996916039b16a218e3d95e97e"}, - {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b2513ba235829860b13faa931f3b6846548021846ac808455301c23a101689d"}, - {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eaf16ae9ae519a0e237a0f528fd9f0197b9bb70f40263ee57ae53c2b8d48aeb3"}, - {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:583f6a1993ca3369e0f80ba99d796d8e6b1a3a2a442dd4e1a79e652116413091"}, - {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4617e1915a539a0d9a9567795023de41a87106522ff83fbfaf1f6baf8e85437e"}, - {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c150c7a61ed4a4f4955a96626574e9baf1adf772c2fb61ef6a5027e52803543"}, - {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2fa4331c200c2521512595253f5bb70858b90f750d39b8cbfd67465f8d1b596d"}, - {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:214b7a953d73b5e87f0ebece4a32a5bd83c60a3ecc9d4ec8f1dca968a2d91e99"}, - {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f47ad3d5f3258bd7058d2d506852217865afefe6153a36eb4b6928758041d831"}, - {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f276b245347e6e36526cbd4a266a417796fc531ddf391e43574cf6466c492520"}, - {file = "rpds_py-0.22.3-cp39-cp39-win32.whl", hash = 
"sha256:bbb232860e3d03d544bc03ac57855cd82ddf19c7a07651a7c0fdb95e9efea8b9"}, - {file = "rpds_py-0.22.3-cp39-cp39-win_amd64.whl", hash = "sha256:cfbc454a2880389dbb9b5b398e50d439e2e58669160f27b60e5eca11f68ae17c"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d48424e39c2611ee1b84ad0f44fb3b2b53d473e65de061e3f460fc0be5f1939d"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:24e8abb5878e250f2eb0d7859a8e561846f98910326d06c0d51381fed59357bd"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b232061ca880db21fa14defe219840ad9b74b6158adb52ddf0e87bead9e8493"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac0a03221cdb5058ce0167ecc92a8c89e8d0decdc9e99a2ec23380793c4dcb96"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb0c341fa71df5a4595f9501df4ac5abfb5a09580081dffbd1ddd4654e6e9123"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf9db5488121b596dbfc6718c76092fda77b703c1f7533a226a5a9f65248f8ad"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b8db6b5b2d4491ad5b6bdc2bc7c017eec108acbf4e6785f42a9eb0ba234f4c9"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b3d504047aba448d70cf6fa22e06cb09f7cbd761939fdd47604f5e007675c24e"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:e61b02c3f7a1e0b75e20c3978f7135fd13cb6cf551bf4a6d29b999a88830a338"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:e35ba67d65d49080e8e5a1dd40101fccdd9798adb9b050ff670b7d74fa41c566"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:26fd7cac7dd51011a245f29a2cc6489c4608b5a8ce8d75661bb4a1066c52dfbe"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:177c7c0fce2855833819c98e43c262007f42ce86651ffbb84f37883308cb0e7d"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bb47271f60660803ad11f4c61b42242b8c1312a31c98c578f79ef9387bbde21c"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:70fb28128acbfd264eda9bf47015537ba3fe86e40d046eb2963d75024be4d055"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44d61b4b7d0c2c9ac019c314e52d7cbda0ae31078aabd0f22e583af3e0d79723"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f0e260eaf54380380ac3808aa4ebe2d8ca28b9087cf411649f96bad6900c728"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b25bc607423935079e05619d7de556c91fb6adeae9d5f80868dde3468657994b"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fb6116dfb8d1925cbdb52595560584db42a7f664617a1f7d7f6e32f138cdf37d"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a63cbdd98acef6570c62b92a1e43266f9e8b21e699c363c0fef13bd530799c11"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2b8f60e1b739a74bab7e01fcbe3dddd4657ec685caa04681df9d562ef15b625f"}, - {file = 
"rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:2e8b55d8517a2fda8d95cb45d62a5a8bbf9dd0ad39c5b25c8833efea07b880ca"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:2de29005e11637e7a2361fa151f780ff8eb2543a0da1413bb951e9f14b699ef3"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:666ecce376999bf619756a24ce15bb14c5bfaf04bf00abc7e663ce17c3f34fe7"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:5246b14ca64a8675e0a7161f7af68fe3e910e6b90542b4bfb5439ba752191df6"}, - {file = "rpds_py-0.22.3.tar.gz", hash = "sha256:e32fee8ab45d3c2db6da19a5323bc3362237c8b653c70194414b892fd06a080d"}, + {file = "rpds_py-0.20.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3ad0fda1635f8439cde85c700f964b23ed5fc2d28016b32b9ee5fe30da5c84e2"}, + {file = "rpds_py-0.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9bb4a0d90fdb03437c109a17eade42dfbf6190408f29b2744114d11586611d6f"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6377e647bbfd0a0b159fe557f2c6c602c159fc752fa316572f012fc0bf67150"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb851b7df9dda52dc1415ebee12362047ce771fc36914586b2e9fcbd7d293b3e"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e0f80b739e5a8f54837be5d5c924483996b603d5502bfff79bf33da06164ee2"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a8c94dad2e45324fc74dce25e1645d4d14df9a4e54a30fa0ae8bad9a63928e3"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8e604fe73ba048c06085beaf51147eaec7df856824bfe7b98657cf436623daf"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:df3de6b7726b52966edf29663e57306b23ef775faf0ac01a3e9f4012a24a4140"}, + {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf258ede5bc22a45c8e726b29835b9303c285ab46fc7c3a4cc770736b5304c9f"}, + {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:55fea87029cded5df854ca7e192ec7bdb7ecd1d9a3f63d5c4eb09148acf4a7ce"}, + {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ae94bd0b2f02c28e199e9bc51485d0c5601f58780636185660f86bf80c89af94"}, + {file = "rpds_py-0.20.0-cp310-none-win32.whl", hash = "sha256:28527c685f237c05445efec62426d285e47a58fb05ba0090a4340b73ecda6dee"}, + {file = "rpds_py-0.20.0-cp310-none-win_amd64.whl", hash = "sha256:238a2d5b1cad28cdc6ed15faf93a998336eb041c4e440dd7f902528b8891b399"}, + {file = "rpds_py-0.20.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac2f4f7a98934c2ed6505aead07b979e6f999389f16b714448fb39bbaa86a489"}, + {file = "rpds_py-0.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:220002c1b846db9afd83371d08d239fdc865e8f8c5795bbaec20916a76db3318"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d7919548df3f25374a1f5d01fbcd38dacab338ef5f33e044744b5c36729c8db"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:758406267907b3781beee0f0edfe4a179fbd97c0be2e9b1154d7f0a1279cf8e5"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3d61339e9f84a3f0767b1995adfb171a0d00a1185192718a17af6e124728e0f5"}, + {file = 
"rpds_py-0.20.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1259c7b3705ac0a0bd38197565a5d603218591d3f6cee6e614e380b6ba61c6f6"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c1dc0f53856b9cc9a0ccca0a7cc61d3d20a7088201c0937f3f4048c1718a209"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7e60cb630f674a31f0368ed32b2a6b4331b8350d67de53c0359992444b116dd3"}, + {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dbe982f38565bb50cb7fb061ebf762c2f254ca3d8c20d4006878766e84266272"}, + {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:514b3293b64187172bc77c8fb0cdae26981618021053b30d8371c3a902d4d5ad"}, + {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d0a26ffe9d4dd35e4dfdd1e71f46401cff0181c75ac174711ccff0459135fa58"}, + {file = "rpds_py-0.20.0-cp311-none-win32.whl", hash = "sha256:89c19a494bf3ad08c1da49445cc5d13d8fefc265f48ee7e7556839acdacf69d0"}, + {file = "rpds_py-0.20.0-cp311-none-win_amd64.whl", hash = "sha256:c638144ce971df84650d3ed0096e2ae7af8e62ecbbb7b201c8935c370df00a2c"}, + {file = "rpds_py-0.20.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a84ab91cbe7aab97f7446652d0ed37d35b68a465aeef8fc41932a9d7eee2c1a6"}, + {file = "rpds_py-0.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:56e27147a5a4c2c21633ff8475d185734c0e4befd1c989b5b95a5d0db699b21b"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2580b0c34583b85efec8c5c5ec9edf2dfe817330cc882ee972ae650e7b5ef739"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b80d4a7900cf6b66bb9cee5c352b2d708e29e5a37fe9bf784fa97fc11504bf6c"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50eccbf054e62a7b2209b28dc7a22d6254860209d6753e6b78cfaeb0075d7bee"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:49a8063ea4296b3a7e81a5dfb8f7b2d73f0b1c20c2af401fb0cdf22e14711a96"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea438162a9fcbee3ecf36c23e6c68237479f89f962f82dae83dc15feeceb37e4"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:18d7585c463087bddcfa74c2ba267339f14f2515158ac4db30b1f9cbdb62c8ef"}, + {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d4c7d1a051eeb39f5c9547e82ea27cbcc28338482242e3e0b7768033cb083821"}, + {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4df1e3b3bec320790f699890d41c59d250f6beda159ea3c44c3f5bac1976940"}, + {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2cf126d33a91ee6eedc7f3197b53e87a2acdac63602c0f03a02dd69e4b138174"}, + {file = "rpds_py-0.20.0-cp312-none-win32.whl", hash = "sha256:8bc7690f7caee50b04a79bf017a8d020c1f48c2a1077ffe172abec59870f1139"}, + {file = "rpds_py-0.20.0-cp312-none-win_amd64.whl", hash = "sha256:0e13e6952ef264c40587d510ad676a988df19adea20444c2b295e536457bc585"}, + {file = "rpds_py-0.20.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:aa9a0521aeca7d4941499a73ad7d4f8ffa3d1affc50b9ea11d992cd7eff18a29"}, + {file = "rpds_py-0.20.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a1f1d51eccb7e6c32ae89243cb352389228ea62f89cd80823ea7dd1b98e0b91"}, + {file = 
"rpds_py-0.20.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a86a9b96070674fc88b6f9f71a97d2c1d3e5165574615d1f9168ecba4cecb24"}, + {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6c8ef2ebf76df43f5750b46851ed1cdf8f109d7787ca40035fe19fbdc1acc5a7"}, + {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b74b25f024b421d5859d156750ea9a65651793d51b76a2e9238c05c9d5f203a9"}, + {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57eb94a8c16ab08fef6404301c38318e2c5a32216bf5de453e2714c964c125c8"}, + {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1940dae14e715e2e02dfd5b0f64a52e8374a517a1e531ad9412319dc3ac7879"}, + {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d20277fd62e1b992a50c43f13fbe13277a31f8c9f70d59759c88f644d66c619f"}, + {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:06db23d43f26478303e954c34c75182356ca9aa7797d22c5345b16871ab9c45c"}, + {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b2a5db5397d82fa847e4c624b0c98fe59d2d9b7cf0ce6de09e4d2e80f8f5b3f2"}, + {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a35df9f5548fd79cb2f52d27182108c3e6641a4feb0f39067911bf2adaa3e57"}, + {file = "rpds_py-0.20.0-cp313-none-win32.whl", hash = "sha256:fd2d84f40633bc475ef2d5490b9c19543fbf18596dcb1b291e3a12ea5d722f7a"}, + {file = "rpds_py-0.20.0-cp313-none-win_amd64.whl", hash = "sha256:9bc2d153989e3216b0559251b0c260cfd168ec78b1fac33dd485750a228db5a2"}, + {file = "rpds_py-0.20.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:f2fbf7db2012d4876fb0d66b5b9ba6591197b0f165db8d99371d976546472a24"}, + {file = "rpds_py-0.20.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1e5f3cd7397c8f86c8cc72d5a791071431c108edd79872cdd96e00abd8497d29"}, + {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce9845054c13696f7af7f2b353e6b4f676dab1b4b215d7fe5e05c6f8bb06f965"}, + {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c3e130fd0ec56cb76eb49ef52faead8ff09d13f4527e9b0c400307ff72b408e1"}, + {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b16aa0107ecb512b568244ef461f27697164d9a68d8b35090e9b0c1c8b27752"}, + {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa7f429242aae2947246587d2964fad750b79e8c233a2367f71b554e9447949c"}, + {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af0fc424a5842a11e28956e69395fbbeab2c97c42253169d87e90aac2886d751"}, + {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b8c00a3b1e70c1d3891f0db1b05292747f0dbcfb49c43f9244d04c70fbc40eb8"}, + {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:40ce74fc86ee4645d0a225498d091d8bc61f39b709ebef8204cb8b5a464d3c0e"}, + {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:4fe84294c7019456e56d93e8ababdad5a329cd25975be749c3f5f558abb48253"}, + {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:338ca4539aad4ce70a656e5187a3a31c5204f261aef9f6ab50e50bcdffaf050a"}, + {file = "rpds_py-0.20.0-cp38-none-win32.whl", hash = 
"sha256:54b43a2b07db18314669092bb2de584524d1ef414588780261e31e85846c26a5"}, + {file = "rpds_py-0.20.0-cp38-none-win_amd64.whl", hash = "sha256:a1862d2d7ce1674cffa6d186d53ca95c6e17ed2b06b3f4c476173565c862d232"}, + {file = "rpds_py-0.20.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:3fde368e9140312b6e8b6c09fb9f8c8c2f00999d1823403ae90cc00480221b22"}, + {file = "rpds_py-0.20.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9824fb430c9cf9af743cf7aaf6707bf14323fb51ee74425c380f4c846ea70789"}, + {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11ef6ce74616342888b69878d45e9f779b95d4bd48b382a229fe624a409b72c5"}, + {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c52d3f2f82b763a24ef52f5d24358553e8403ce05f893b5347098014f2d9eff2"}, + {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d35cef91e59ebbeaa45214861874bc6f19eb35de96db73e467a8358d701a96c"}, + {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d72278a30111e5b5525c1dd96120d9e958464316f55adb030433ea905866f4de"}, + {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4c29cbbba378759ac5786730d1c3cb4ec6f8ababf5c42a9ce303dc4b3d08cda"}, + {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6632f2d04f15d1bd6fe0eedd3b86d9061b836ddca4c03d5cf5c7e9e6b7c14580"}, + {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d0b67d87bb45ed1cd020e8fbf2307d449b68abc45402fe1a4ac9e46c3c8b192b"}, + {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ec31a99ca63bf3cd7f1a5ac9fe95c5e2d060d3c768a09bc1d16e235840861420"}, + {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:22e6c9976e38f4d8c4a63bd8a8edac5307dffd3ee7e6026d97f3cc3a2dc02a0b"}, + {file = "rpds_py-0.20.0-cp39-none-win32.whl", hash = "sha256:569b3ea770c2717b730b61998b6c54996adee3cef69fc28d444f3e7920313cf7"}, + {file = "rpds_py-0.20.0-cp39-none-win_amd64.whl", hash = "sha256:e6900ecdd50ce0facf703f7a00df12374b74bbc8ad9fe0f6559947fb20f82364"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:617c7357272c67696fd052811e352ac54ed1d9b49ab370261a80d3b6ce385045"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9426133526f69fcaba6e42146b4e12d6bc6c839b8b555097020e2b78ce908dcc"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deb62214c42a261cb3eb04d474f7155279c1a8a8c30ac89b7dcb1721d92c3c02"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcaeb7b57f1a1e071ebd748984359fef83ecb026325b9d4ca847c95bc7311c92"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d454b8749b4bd70dd0a79f428731ee263fa6995f83ccb8bada706e8d1d3ff89d"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d807dc2051abe041b6649681dce568f8e10668e3c1c6543ebae58f2d7e617855"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3c20f0ddeb6e29126d45f89206b8291352b8c5b44384e78a6499d68b52ae511"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:b7f19250ceef892adf27f0399b9e5afad019288e9be756d6919cb58892129f51"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4f1ed4749a08379555cebf4650453f14452eaa9c43d0a95c49db50c18b7da075"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:dcedf0b42bcb4cfff4101d7771a10532415a6106062f005ab97d1d0ab5681c60"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:39ed0d010457a78f54090fafb5d108501b5aa5604cc22408fc1c0c77eac14344"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:bb273176be34a746bdac0b0d7e4e2c467323d13640b736c4c477881a3220a989"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f918a1a130a6dfe1d7fe0f105064141342e7dd1611f2e6a21cd2f5c8cb1cfb3e"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f60012a73aa396be721558caa3a6fd49b3dd0033d1675c6d59c4502e870fcf0c"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d2b1ad682a3dfda2a4e8ad8572f3100f95fad98cb99faf37ff0ddfe9cbf9d03"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:614fdafe9f5f19c63ea02817fa4861c606a59a604a77c8cdef5aa01d28b97921"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa518bcd7600c584bf42e6617ee8132869e877db2f76bcdc281ec6a4113a53ab"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0475242f447cc6cb8a9dd486d68b2ef7fbee84427124c232bff5f63b1fe11e5"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f90a4cd061914a60bd51c68bcb4357086991bd0bb93d8aa66a6da7701370708f"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:def7400461c3a3f26e49078302e1c1b38f6752342c77e3cf72ce91ca69fb1bc1"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:65794e4048ee837494aea3c21a28ad5fc080994dfba5b036cf84de37f7ad5074"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:faefcc78f53a88f3076b7f8be0a8f8d35133a3ecf7f3770895c25f8813460f08"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:5b4f105deeffa28bbcdff6c49b34e74903139afa690e35d2d9e3c2c2fba18cec"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fdfc3a892927458d98f3d55428ae46b921d1f7543b89382fdb483f5640daaec8"}, + {file = "rpds_py-0.20.0.tar.gz", hash = "sha256:d72a210824facfdaf8768cf2d7ca25a042c30320b3020de2fa04640920d4e121"}, ] [[package]] @@ -3550,14 +3519,14 @@ files = [ [[package]] name = "six" -version = "1.17.0" +version = "1.16.0" description = "Python 2 and 3 compatibility utilities" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" groups = ["main", "dev"] files = [ - {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, - {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = 
"sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] [[package]] @@ -3670,14 +3639,14 @@ sqlcipher = ["sqlcipher3_binary"] [[package]] name = "starlette" -version = "0.41.3" +version = "0.41.2" description = "The little ASGI library that shines." optional = false python-versions = ">=3.8" groups = ["main"] files = [ - {file = "starlette-0.41.3-py3-none-any.whl", hash = "sha256:44cedb2b7c77a9de33a8b74b2b90e9f50d11fcf25d8270ea525ad71a25374ff7"}, - {file = "starlette-0.41.3.tar.gz", hash = "sha256:0e4ab3d16522a255be6b28260b938eae2482f98ce5cc934cb08dce8dc3ba5835"}, + {file = "starlette-0.41.2-py3-none-any.whl", hash = "sha256:fbc189474b4731cf30fcef52f18a8d070e3f3b46c6a04c97579e85e6ffca942d"}, + {file = "starlette-0.41.2.tar.gz", hash = "sha256:9834fd799d1a87fd346deb76158668cfa0b0d56f85caefe8268e2d97c3468b62"}, ] [package.dependencies] @@ -3688,18 +3657,18 @@ full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7 [[package]] name = "storage3" -version = "0.9.0" +version = "0.11.0" description = "Supabase Storage client for Python." optional = false python-versions = "<4.0,>=3.9" groups = ["main"] files = [ - {file = "storage3-0.9.0-py3-none-any.whl", hash = "sha256:8b2fb91f0c61583a2f4eac74a8bae67e00d41ff38095c8a6cd3f2ce5e0ab76e7"}, - {file = "storage3-0.9.0.tar.gz", hash = "sha256:e16697f60894c94e1d9df0d2e4af783c1b3f7dd08c9013d61978825c624188c4"}, + {file = "storage3-0.11.0-py3-none-any.whl", hash = "sha256:de2d8f9c9103ca91a9a9d0d69d80b07a3ab6f647b93e023e6a1a97d3607b9728"}, + {file = "storage3-0.11.0.tar.gz", hash = "sha256:243583f2180686c0f0a19e6117d8a9796fd60c0ca72ec567d62b75a5af0d57a1"}, ] [package.dependencies] -httpx = {version = ">=0.26,<0.28", extras = ["http2"]} +httpx = {version = ">=0.26,<0.29", extras = ["http2"]} python-dateutil = ">=2.8.2,<3.0.0" [[package]] @@ -3721,38 +3690,39 @@ test = ["pylint", "pytest", "pytest-black", "pytest-cov", "pytest-pylint"] [[package]] name = "supabase" -version = "2.10.0" +version = "2.11.0" description = "Supabase client for Python." 
optional = false python-versions = "<4.0,>=3.9" groups = ["main"] files = [ - {file = "supabase-2.10.0-py3-none-any.whl", hash = "sha256:183fb23c04528593f8f81c24ceb8178f3a56bff40fec7ed873b6c55ebc2e420a"}, - {file = "supabase-2.10.0.tar.gz", hash = "sha256:9ac095f8947bf60780e67c0edcbab53e2db3f6f3f022329397b093500bf2607c"}, + {file = "supabase-2.11.0-py3-none-any.whl", hash = "sha256:67a0da498895f4cd6554935e2854b4c41f87b297b78fb9c9414902a382041406"}, + {file = "supabase-2.11.0.tar.gz", hash = "sha256:2a906f7909fd9a50f944cd9332ce66c684e2d37c0864284d34c5815e6c63cc01"}, ] [package.dependencies] -gotrue = ">=2.10.0,<3.0.0" -httpx = ">=0.26,<0.28" -postgrest = ">=0.18,<0.19" +gotrue = ">=2.11.0,<3.0.0" +httpx = ">=0.26,<0.29" +postgrest = ">=0.19,<0.20" realtime = ">=2.0.0,<3.0.0" -storage3 = ">=0.9.0,<0.10.0" -supafunc = ">=0.7.0,<0.8.0" +storage3 = ">=0.10,<0.12" +supafunc = ">=0.9,<0.10" [[package]] name = "supafunc" -version = "0.7.0" +version = "0.9.0" description = "Library for Supabase Functions" optional = false python-versions = "<4.0,>=3.9" groups = ["main"] files = [ - {file = "supafunc-0.7.0-py3-none-any.whl", hash = "sha256:4160260dc02bdd906be1e2ffd7cb3ae8b74ae437c892bb475352b6a99d9ff8eb"}, - {file = "supafunc-0.7.0.tar.gz", hash = "sha256:5b1c415fba1395740b2b4eedd1d786384bd58b98f6333a11ba7889820a48b6a7"}, + {file = "supafunc-0.9.0-py3-none-any.whl", hash = "sha256:2aa3ab4d125c1843c28f1b437db2442ea68448f2654b6b78196dbe077197c52a"}, + {file = "supafunc-0.9.0.tar.gz", hash = "sha256:64cdf331f5a3f2afc7c181697d4723efc084620ea66611f3211dd5ecbef595c1"}, ] [package.dependencies] httpx = {version = ">=0.26,<0.28", extras = ["http2"]} +strenum = ">=0.4.15,<0.5.0" [[package]] name = "tenacity" @@ -3772,45 +3742,15 @@ test = ["pytest", "tornado (>=4.5)", "typeguard"] [[package]] name = "tomli" -version = "2.2.1" +version = "2.0.1" description = "A lil' TOML parser" optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" groups = ["main", "dev"] markers = "python_version < \"3.11\"" files = [ - {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, - {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, - {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, - {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, 
- {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, - {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, - {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, - {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, - {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, - {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, - {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, - {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, - {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, - {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] [[package]] @@ -3827,26 +3767,49 @@ 
files = [ [[package]] name = "tqdm" -version = "4.67.1" +version = "4.66.5" description = "Fast, Extensible Progress Meter" optional = false python-versions = ">=3.7" groups = ["main"] files = [ - {file = "tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2"}, - {file = "tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2"}, + {file = "tqdm-4.66.5-py3-none-any.whl", hash = "sha256:90279a3770753eafc9194a0364852159802111925aa30eb3f9d85b0e805ac7cd"}, + {file = "tqdm-4.66.5.tar.gz", hash = "sha256:e1020aef2e5096702d8a025ac7d16b1577279c9d63f8375b63083e9a5f0fcbad"}, ] [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} [package.extras] -dev = ["nbval", "pytest (>=6)", "pytest-asyncio (>=0.24)", "pytest-cov", "pytest-timeout"] -discord = ["requests"] +dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] notebook = ["ipywidgets (>=6)"] slack = ["slack-sdk"] telegram = ["requests"] +[[package]] +name = "tweepy" +version = "4.14.0" +description = "Twitter library for Python" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "tweepy-4.14.0-py3-none-any.whl", hash = "sha256:db6d3844ccc0c6d27f339f12ba8acc89912a961da513c1ae50fa2be502a56afb"}, + {file = "tweepy-4.14.0.tar.gz", hash = "sha256:1f9f1707d6972de6cff6c5fd90dfe6a449cd2e0d70bd40043ffab01e07a06c8c"}, +] + +[package.dependencies] +oauthlib = ">=3.2.0,<4" +requests = ">=2.27.0,<3" +requests-oauthlib = ">=1.2.0,<2" + +[package.extras] +async = ["aiohttp (>=3.7.3,<4)", "async-lru (>=1.0.3,<3)"] +dev = ["coverage (>=4.4.2)", "coveralls (>=2.1.0)", "tox (>=3.21.0)"] +docs = ["myst-parser (==0.15.2)", "readthedocs-sphinx-search (==0.1.1)", "sphinx (==4.2.0)", "sphinx-hoverxref (==0.7b1)", "sphinx-rtd-theme (==1.0.0)", "sphinx-tabs (==3.2.0)"] +socks = ["requests[socks] (>=2.27.0,<3)"] +test = ["vcrpy (>=1.10.3)"] + [[package]] name = "typing-extensions" version = "4.12.2" @@ -3969,95 +3932,88 @@ standard = ["colorama (>=0.4)", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", [[package]] name = "uvloop" -version = "0.21.0" +version = "0.20.0" description = "Fast implementation of asyncio event loop on top of libuv" optional = false python-versions = ">=3.8.0" groups = ["main"] markers = "(sys_platform != \"win32\" and sys_platform != \"cygwin\") and platform_python_implementation != \"PyPy\"" files = [ - {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ec7e6b09a6fdded42403182ab6b832b71f4edaf7f37a9a0e371a01db5f0cb45f"}, - {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:196274f2adb9689a289ad7d65700d37df0c0930fd8e4e743fa4834e850d7719d"}, - {file = "uvloop-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f38b2e090258d051d68a5b14d1da7203a3c3677321cf32a95a6f4db4dd8b6f26"}, - {file = "uvloop-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87c43e0f13022b998eb9b973b5e97200c8b90823454d4bc06ab33829e09fb9bb"}, - {file = "uvloop-0.21.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:10d66943def5fcb6e7b37310eb6b5639fd2ccbc38df1177262b0640c3ca68c1f"}, - {file = "uvloop-0.21.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:67dd654b8ca23aed0a8e99010b4c34aca62f4b7fce88f39d452ed7622c94845c"}, - {file = "uvloop-0.21.0-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:c0f3fa6200b3108919f8bdabb9a7f87f20e7097ea3c543754cabc7d717d95cf8"}, - {file = "uvloop-0.21.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0878c2640cf341b269b7e128b1a5fed890adc4455513ca710d77d5e93aa6d6a0"}, - {file = "uvloop-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9fb766bb57b7388745d8bcc53a359b116b8a04c83a2288069809d2b3466c37e"}, - {file = "uvloop-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a375441696e2eda1c43c44ccb66e04d61ceeffcd76e4929e527b7fa401b90fb"}, - {file = "uvloop-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:baa0e6291d91649c6ba4ed4b2f982f9fa165b5bbd50a9e203c416a2797bab3c6"}, - {file = "uvloop-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4509360fcc4c3bd2c70d87573ad472de40c13387f5fda8cb58350a1d7475e58d"}, - {file = "uvloop-0.21.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:359ec2c888397b9e592a889c4d72ba3d6befba8b2bb01743f72fffbde663b59c"}, - {file = "uvloop-0.21.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7089d2dc73179ce5ac255bdf37c236a9f914b264825fdaacaded6990a7fb4c2"}, - {file = "uvloop-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baa4dcdbd9ae0a372f2167a207cd98c9f9a1ea1188a8a526431eef2f8116cc8d"}, - {file = "uvloop-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86975dca1c773a2c9864f4c52c5a55631038e387b47eaf56210f873887b6c8dc"}, - {file = "uvloop-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:461d9ae6660fbbafedd07559c6a2e57cd553b34b0065b6550685f6653a98c1cb"}, - {file = "uvloop-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:183aef7c8730e54c9a3ee3227464daed66e37ba13040bb3f350bc2ddc040f22f"}, - {file = "uvloop-0.21.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bfd55dfcc2a512316e65f16e503e9e450cab148ef11df4e4e679b5e8253a5281"}, - {file = "uvloop-0.21.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:787ae31ad8a2856fc4e7c095341cccc7209bd657d0e71ad0dc2ea83c4a6fa8af"}, - {file = "uvloop-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ee4d4ef48036ff6e5cfffb09dd192c7a5027153948d85b8da7ff705065bacc6"}, - {file = "uvloop-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3df876acd7ec037a3d005b3ab85a7e4110422e4d9c1571d4fc89b0fc41b6816"}, - {file = "uvloop-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd53ecc9a0f3d87ab847503c2e1552b690362e005ab54e8a48ba97da3924c0dc"}, - {file = "uvloop-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5c39f217ab3c663dc699c04cbd50c13813e31d917642d459fdcec07555cc553"}, - {file = "uvloop-0.21.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:17df489689befc72c39a08359efac29bbee8eee5209650d4b9f34df73d22e414"}, - {file = "uvloop-0.21.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc09f0ff191e61c2d592a752423c767b4ebb2986daa9ed62908e2b1b9a9ae206"}, - {file = "uvloop-0.21.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0ce1b49560b1d2d8a2977e3ba4afb2414fb46b86a1b64056bc4ab929efdafbe"}, - {file = "uvloop-0.21.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e678ad6fe52af2c58d2ae3c73dc85524ba8abe637f134bf3564ed07f555c5e79"}, - {file = "uvloop-0.21.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:460def4412e473896ef179a1671b40c039c7012184b627898eea5072ef6f017a"}, - {file = "uvloop-0.21.0-cp38-cp38-musllinux_1_2_x86_64.whl", 
hash = "sha256:10da8046cc4a8f12c91a1c39d1dd1585c41162a15caaef165c2174db9ef18bdc"}, - {file = "uvloop-0.21.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c097078b8031190c934ed0ebfee8cc5f9ba9642e6eb88322b9958b649750f72b"}, - {file = "uvloop-0.21.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:46923b0b5ee7fc0020bef24afe7836cb068f5050ca04caf6b487c513dc1a20b2"}, - {file = "uvloop-0.21.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53e420a3afe22cdcf2a0f4846e377d16e718bc70103d7088a4f7623567ba5fb0"}, - {file = "uvloop-0.21.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88cb67cdbc0e483da00af0b2c3cdad4b7c61ceb1ee0f33fe00e09c81e3a6cb75"}, - {file = "uvloop-0.21.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:221f4f2a1f46032b403bf3be628011caf75428ee3cc204a22addf96f586b19fd"}, - {file = "uvloop-0.21.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2d1f581393673ce119355d56da84fe1dd9d2bb8b3d13ce792524e1607139feff"}, - {file = "uvloop-0.21.0.tar.gz", hash = "sha256:3bf12b0fda68447806a7ad847bfa591613177275d35b6724b1ee573faa3704e3"}, + {file = "uvloop-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9ebafa0b96c62881d5cafa02d9da2e44c23f9f0cd829f3a32a6aff771449c996"}, + {file = "uvloop-0.20.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:35968fc697b0527a06e134999eef859b4034b37aebca537daeb598b9d45a137b"}, + {file = "uvloop-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b16696f10e59d7580979b420eedf6650010a4a9c3bd8113f24a103dfdb770b10"}, + {file = "uvloop-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b04d96188d365151d1af41fa2d23257b674e7ead68cfd61c725a422764062ae"}, + {file = "uvloop-0.20.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:94707205efbe809dfa3a0d09c08bef1352f5d3d6612a506f10a319933757c006"}, + {file = "uvloop-0.20.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:89e8d33bb88d7263f74dc57d69f0063e06b5a5ce50bb9a6b32f5fcbe655f9e73"}, + {file = "uvloop-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e50289c101495e0d1bb0bfcb4a60adde56e32f4449a67216a1ab2750aa84f037"}, + {file = "uvloop-0.20.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e237f9c1e8a00e7d9ddaa288e535dc337a39bcbf679f290aee9d26df9e72bce9"}, + {file = "uvloop-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:746242cd703dc2b37f9d8b9f173749c15e9a918ddb021575a0205ec29a38d31e"}, + {file = "uvloop-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82edbfd3df39fb3d108fc079ebc461330f7c2e33dbd002d146bf7c445ba6e756"}, + {file = "uvloop-0.20.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:80dc1b139516be2077b3e57ce1cb65bfed09149e1d175e0478e7a987863b68f0"}, + {file = "uvloop-0.20.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4f44af67bf39af25db4c1ac27e82e9665717f9c26af2369c404be865c8818dcf"}, + {file = "uvloop-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4b75f2950ddb6feed85336412b9a0c310a2edbcf4cf931aa5cfe29034829676d"}, + {file = "uvloop-0.20.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:77fbc69c287596880ecec2d4c7a62346bef08b6209749bf6ce8c22bbaca0239e"}, + {file = "uvloop-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6462c95f48e2d8d4c993a2950cd3d31ab061864d1c226bbf0ee2f1a8f36674b9"}, + {file = "uvloop-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:649c33034979273fa71aa25d0fe120ad1777c551d8c4cd2c0c9851d88fcb13ab"}, + {file = "uvloop-0.20.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3a609780e942d43a275a617c0839d85f95c334bad29c4c0918252085113285b5"}, + {file = "uvloop-0.20.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aea15c78e0d9ad6555ed201344ae36db5c63d428818b4b2a42842b3870127c00"}, + {file = "uvloop-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f0e94b221295b5e69de57a1bd4aeb0b3a29f61be6e1b478bb8a69a73377db7ba"}, + {file = "uvloop-0.20.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fee6044b64c965c425b65a4e17719953b96e065c5b7e09b599ff332bb2744bdf"}, + {file = "uvloop-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:265a99a2ff41a0fd56c19c3838b29bf54d1d177964c300dad388b27e84fd7847"}, + {file = "uvloop-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b10c2956efcecb981bf9cfb8184d27d5d64b9033f917115a960b83f11bfa0d6b"}, + {file = "uvloop-0.20.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e7d61fe8e8d9335fac1bf8d5d82820b4808dd7a43020c149b63a1ada953d48a6"}, + {file = "uvloop-0.20.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2beee18efd33fa6fdb0976e18475a4042cd31c7433c866e8a09ab604c7c22ff2"}, + {file = "uvloop-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d8c36fdf3e02cec92aed2d44f63565ad1522a499c654f07935c8f9d04db69e95"}, + {file = "uvloop-0.20.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a0fac7be202596c7126146660725157d4813aa29a4cc990fe51346f75ff8fde7"}, + {file = "uvloop-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d0fba61846f294bce41eb44d60d58136090ea2b5b99efd21cbdf4e21927c56a"}, + {file = "uvloop-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95720bae002ac357202e0d866128eb1ac82545bcf0b549b9abe91b5178d9b541"}, + {file = "uvloop-0.20.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:36c530d8fa03bfa7085af54a48f2ca16ab74df3ec7108a46ba82fd8b411a2315"}, + {file = "uvloop-0.20.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e97152983442b499d7a71e44f29baa75b3b02e65d9c44ba53b10338e98dedb66"}, + {file = "uvloop-0.20.0.tar.gz", hash = "sha256:4603ca714a754fc8d9b197e325db25b2ea045385e8a3ad05d3463de725fdf469"}, ] [package.extras] -dev = ["Cython (>=3.0,<4.0)", "setuptools (>=60)"] docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] -test = ["aiohttp (>=3.10.5)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle (>=2.9.0,<2.10.0)"] +test = ["Cython (>=0.29.36,<0.30.0)", "aiohttp (==3.9.0b0)", "aiohttp (>=3.8.1)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle (>=2.9.0,<2.10.0)"] [[package]] name = "watchdog" -version = "6.0.0" +version = "5.0.3" description = "Filesystem events monitoring" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "watchdog-6.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d1cdb490583ebd691c012b3d6dae011000fe42edb7a82ece80965b42abd61f26"}, - {file = "watchdog-6.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bc64ab3bdb6a04d69d4023b29422170b74681784ffb9463ed4870cf2f3e66112"}, - {file = "watchdog-6.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c897ac1b55c5a1461e16dae288d22bb2e412ba9807df8397a635d88f671d36c3"}, - {file = "watchdog-6.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:6eb11feb5a0d452ee41f824e271ca311a09e250441c262ca2fd7ebcf2461a06c"}, - {file = "watchdog-6.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ef810fbf7b781a5a593894e4f439773830bdecb885e6880d957d5b9382a960d2"}, - {file = "watchdog-6.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:afd0fe1b2270917c5e23c2a65ce50c2a4abb63daafb0d419fde368e272a76b7c"}, - {file = "watchdog-6.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdd4e6f14b8b18c334febb9c4425a878a2ac20efd1e0b231978e7b150f92a948"}, - {file = "watchdog-6.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c7c15dda13c4eb00d6fb6fc508b3c0ed88b9d5d374056b239c4ad1611125c860"}, - {file = "watchdog-6.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f10cb2d5902447c7d0da897e2c6768bca89174d0c6e1e30abec5421af97a5b0"}, - {file = "watchdog-6.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:490ab2ef84f11129844c23fb14ecf30ef3d8a6abafd3754a6f75ca1e6654136c"}, - {file = "watchdog-6.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:76aae96b00ae814b181bb25b1b98076d5fc84e8a53cd8885a318b42b6d3a5134"}, - {file = "watchdog-6.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a175f755fc2279e0b7312c0035d52e27211a5bc39719dd529625b1930917345b"}, - {file = "watchdog-6.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e6f0e77c9417e7cd62af82529b10563db3423625c5fce018430b249bf977f9e8"}, - {file = "watchdog-6.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:90c8e78f3b94014f7aaae121e6b909674df5b46ec24d6bebc45c44c56729af2a"}, - {file = "watchdog-6.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e7631a77ffb1f7d2eefa4445ebbee491c720a5661ddf6df3498ebecae5ed375c"}, - {file = "watchdog-6.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:c7ac31a19f4545dd92fc25d200694098f42c9a8e391bc00bdd362c5736dbf881"}, - {file = "watchdog-6.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9513f27a1a582d9808cf21a07dae516f0fab1cf2d7683a742c498b93eedabb11"}, - {file = "watchdog-6.0.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7a0e56874cfbc4b9b05c60c8a1926fedf56324bb08cfbc188969777940aef3aa"}, - {file = "watchdog-6.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:e6439e374fc012255b4ec786ae3c4bc838cd7309a540e5fe0952d03687d8804e"}, - {file = "watchdog-6.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7607498efa04a3542ae3e05e64da8202e58159aa1fa4acddf7678d34a35d4f13"}, - {file = "watchdog-6.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:9041567ee8953024c83343288ccc458fd0a2d811d6a0fd68c4c22609e3490379"}, - {file = "watchdog-6.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:82dc3e3143c7e38ec49d61af98d6558288c415eac98486a5c581726e0737c00e"}, - {file = "watchdog-6.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:212ac9b8bf1161dc91bd09c048048a95ca3a4c4f5e5d4a7d1b1a7d5752a7f96f"}, - {file = "watchdog-6.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:e3df4cbb9a450c6d49318f6d14f4bbc80d763fa587ba46ec86f99f9e6876bb26"}, - {file = "watchdog-6.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:2cce7cfc2008eb51feb6aab51251fd79b85d9894e98ba847408f662b3395ca3c"}, - {file = "watchdog-6.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:20ffe5b202af80ab4266dcd3e91aae72bf2da48c0d33bdb15c66658e685e94e2"}, - {file = "watchdog-6.0.0-py3-none-win32.whl", hash = "sha256:07df1fdd701c5d4c8e55ef6cf55b8f0120fe1aef7ef39a1c6fc6bc2e606d517a"}, - {file = "watchdog-6.0.0-py3-none-win_amd64.whl", hash = 
"sha256:cbafb470cf848d93b5d013e2ecb245d4aa1c8fd0504e863ccefa32445359d680"}, - {file = "watchdog-6.0.0-py3-none-win_ia64.whl", hash = "sha256:a1914259fa9e1454315171103c6a30961236f508b9b623eae470268bbcc6a22f"}, - {file = "watchdog-6.0.0.tar.gz", hash = "sha256:9ddf7c82fda3ae8e24decda1338ede66e1c99883db93711d8fb941eaa2d8c282"}, + {file = "watchdog-5.0.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:85527b882f3facda0579bce9d743ff7f10c3e1e0db0a0d0e28170a7d0e5ce2ea"}, + {file = "watchdog-5.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:53adf73dcdc0ef04f7735066b4a57a4cd3e49ef135daae41d77395f0b5b692cb"}, + {file = "watchdog-5.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e25adddab85f674acac303cf1f5835951345a56c5f7f582987d266679979c75b"}, + {file = "watchdog-5.0.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f01f4a3565a387080dc49bdd1fefe4ecc77f894991b88ef927edbfa45eb10818"}, + {file = "watchdog-5.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:91b522adc25614cdeaf91f7897800b82c13b4b8ac68a42ca959f992f6990c490"}, + {file = "watchdog-5.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d52db5beb5e476e6853da2e2d24dbbbed6797b449c8bf7ea118a4ee0d2c9040e"}, + {file = "watchdog-5.0.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:94d11b07c64f63f49876e0ab8042ae034674c8653bfcdaa8c4b32e71cfff87e8"}, + {file = "watchdog-5.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:349c9488e1d85d0a58e8cb14222d2c51cbc801ce11ac3936ab4c3af986536926"}, + {file = "watchdog-5.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:53a3f10b62c2d569e260f96e8d966463dec1a50fa4f1b22aec69e3f91025060e"}, + {file = "watchdog-5.0.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:950f531ec6e03696a2414b6308f5c6ff9dab7821a768c9d5788b1314e9a46ca7"}, + {file = "watchdog-5.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ae6deb336cba5d71476caa029ceb6e88047fc1dc74b62b7c4012639c0b563906"}, + {file = "watchdog-5.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1021223c08ba8d2d38d71ec1704496471ffd7be42cfb26b87cd5059323a389a1"}, + {file = "watchdog-5.0.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:752fb40efc7cc8d88ebc332b8f4bcbe2b5cc7e881bccfeb8e25054c00c994ee3"}, + {file = "watchdog-5.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a2e8f3f955d68471fa37b0e3add18500790d129cc7efe89971b8a4cc6fdeb0b2"}, + {file = "watchdog-5.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b8ca4d854adcf480bdfd80f46fdd6fb49f91dd020ae11c89b3a79e19454ec627"}, + {file = "watchdog-5.0.3-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:90a67d7857adb1d985aca232cc9905dd5bc4803ed85cfcdcfcf707e52049eda7"}, + {file = "watchdog-5.0.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:720ef9d3a4f9ca575a780af283c8fd3a0674b307651c1976714745090da5a9e8"}, + {file = "watchdog-5.0.3-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:223160bb359281bb8e31c8f1068bf71a6b16a8ad3d9524ca6f523ac666bb6a1e"}, + {file = "watchdog-5.0.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:560135542c91eaa74247a2e8430cf83c4342b29e8ad4f520ae14f0c8a19cfb5b"}, + {file = "watchdog-5.0.3-py3-none-manylinux2014_aarch64.whl", hash = "sha256:dd021efa85970bd4824acacbb922066159d0f9e546389a4743d56919b6758b91"}, + {file = "watchdog-5.0.3-py3-none-manylinux2014_armv7l.whl", hash = "sha256:78864cc8f23dbee55be34cc1494632a7ba30263951b5b2e8fc8286b95845f82c"}, + {file = "watchdog-5.0.3-py3-none-manylinux2014_i686.whl", hash = 
"sha256:1e9679245e3ea6498494b3028b90c7b25dbb2abe65c7d07423ecfc2d6218ff7c"}, + {file = "watchdog-5.0.3-py3-none-manylinux2014_ppc64.whl", hash = "sha256:9413384f26b5d050b6978e6fcd0c1e7f0539be7a4f1a885061473c5deaa57221"}, + {file = "watchdog-5.0.3-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:294b7a598974b8e2c6123d19ef15de9abcd282b0fbbdbc4d23dfa812959a9e05"}, + {file = "watchdog-5.0.3-py3-none-manylinux2014_s390x.whl", hash = "sha256:26dd201857d702bdf9d78c273cafcab5871dd29343748524695cecffa44a8d97"}, + {file = "watchdog-5.0.3-py3-none-manylinux2014_x86_64.whl", hash = "sha256:0f9332243355643d567697c3e3fa07330a1d1abf981611654a1f2bf2175612b7"}, + {file = "watchdog-5.0.3-py3-none-win32.whl", hash = "sha256:c66f80ee5b602a9c7ab66e3c9f36026590a0902db3aea414d59a2f55188c1f49"}, + {file = "watchdog-5.0.3-py3-none-win_amd64.whl", hash = "sha256:f00b4cf737f568be9665563347a910f8bdc76f88c2970121c86243c8cfdf90e9"}, + {file = "watchdog-5.0.3-py3-none-win_ia64.whl", hash = "sha256:49f4d36cb315c25ea0d946e018c01bb028048023b9e103d3d3943f58e109dd45"}, + {file = "watchdog-5.0.3.tar.gz", hash = "sha256:108f42a7f0345042a854d4d0ad0834b741d421330d5f575b81cb27b883500176"}, ] [package.extras] @@ -4262,77 +4218,82 @@ files = [ [[package]] name = "wrapt" -version = "1.17.0" +version = "1.16.0" description = "Module for decorators, wrappers and monkey patching." optional = false -python-versions = ">=3.8" +python-versions = ">=3.6" groups = ["main"] files = [ - {file = "wrapt-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2a0c23b8319848426f305f9cb0c98a6e32ee68a36264f45948ccf8e7d2b941f8"}, - {file = "wrapt-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1ca5f060e205f72bec57faae5bd817a1560fcfc4af03f414b08fa29106b7e2d"}, - {file = "wrapt-1.17.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e185ec6060e301a7e5f8461c86fb3640a7beb1a0f0208ffde7a65ec4074931df"}, - {file = "wrapt-1.17.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb90765dd91aed05b53cd7a87bd7f5c188fcd95960914bae0d32c5e7f899719d"}, - {file = "wrapt-1.17.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:879591c2b5ab0a7184258274c42a126b74a2c3d5a329df16d69f9cee07bba6ea"}, - {file = "wrapt-1.17.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fce6fee67c318fdfb7f285c29a82d84782ae2579c0e1b385b7f36c6e8074fffb"}, - {file = "wrapt-1.17.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0698d3a86f68abc894d537887b9bbf84d29bcfbc759e23f4644be27acf6da301"}, - {file = "wrapt-1.17.0-cp310-cp310-win32.whl", hash = "sha256:69d093792dc34a9c4c8a70e4973a3361c7a7578e9cd86961b2bbf38ca71e4e22"}, - {file = "wrapt-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:f28b29dc158ca5d6ac396c8e0a2ef45c4e97bb7e65522bfc04c989e6fe814575"}, - {file = "wrapt-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:74bf625b1b4caaa7bad51d9003f8b07a468a704e0644a700e936c357c17dd45a"}, - {file = "wrapt-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f2a28eb35cf99d5f5bd12f5dd44a0f41d206db226535b37b0c60e9da162c3ed"}, - {file = "wrapt-1.17.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:81b1289e99cf4bad07c23393ab447e5e96db0ab50974a280f7954b071d41b489"}, - {file = "wrapt-1.17.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:9f2939cd4a2a52ca32bc0b359015718472d7f6de870760342e7ba295be9ebaf9"}, - {file = "wrapt-1.17.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6a9653131bda68a1f029c52157fd81e11f07d485df55410401f745007bd6d339"}, - {file = "wrapt-1.17.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4e4b4385363de9052dac1a67bfb535c376f3d19c238b5f36bddc95efae15e12d"}, - {file = "wrapt-1.17.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bdf62d25234290db1837875d4dceb2151e4ea7f9fff2ed41c0fde23ed542eb5b"}, - {file = "wrapt-1.17.0-cp311-cp311-win32.whl", hash = "sha256:5d8fd17635b262448ab8f99230fe4dac991af1dabdbb92f7a70a6afac8a7e346"}, - {file = "wrapt-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:92a3d214d5e53cb1db8b015f30d544bc9d3f7179a05feb8f16df713cecc2620a"}, - {file = "wrapt-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:89fc28495896097622c3fc238915c79365dd0ede02f9a82ce436b13bd0ab7569"}, - {file = "wrapt-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:875d240fdbdbe9e11f9831901fb8719da0bd4e6131f83aa9f69b96d18fae7504"}, - {file = "wrapt-1.17.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5ed16d95fd142e9c72b6c10b06514ad30e846a0d0917ab406186541fe68b451"}, - {file = "wrapt-1.17.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18b956061b8db634120b58f668592a772e87e2e78bc1f6a906cfcaa0cc7991c1"}, - {file = "wrapt-1.17.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:daba396199399ccabafbfc509037ac635a6bc18510ad1add8fd16d4739cdd106"}, - {file = "wrapt-1.17.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4d63f4d446e10ad19ed01188d6c1e1bb134cde8c18b0aa2acfd973d41fcc5ada"}, - {file = "wrapt-1.17.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8a5e7cc39a45fc430af1aefc4d77ee6bad72c5bcdb1322cfde852c15192b8bd4"}, - {file = "wrapt-1.17.0-cp312-cp312-win32.whl", hash = "sha256:0a0a1a1ec28b641f2a3a2c35cbe86c00051c04fffcfcc577ffcdd707df3f8635"}, - {file = "wrapt-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:3c34f6896a01b84bab196f7119770fd8466c8ae3dfa73c59c0bb281e7b588ce7"}, - {file = "wrapt-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:714c12485aa52efbc0fc0ade1e9ab3a70343db82627f90f2ecbc898fdf0bb181"}, - {file = "wrapt-1.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da427d311782324a376cacb47c1a4adc43f99fd9d996ffc1b3e8529c4074d393"}, - {file = "wrapt-1.17.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba1739fb38441a27a676f4de4123d3e858e494fac05868b7a281c0a383c098f4"}, - {file = "wrapt-1.17.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e711fc1acc7468463bc084d1b68561e40d1eaa135d8c509a65dd534403d83d7b"}, - {file = "wrapt-1.17.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:140ea00c87fafc42739bd74a94a5a9003f8e72c27c47cd4f61d8e05e6dec8721"}, - {file = "wrapt-1.17.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:73a96fd11d2b2e77d623a7f26e004cc31f131a365add1ce1ce9a19e55a1eef90"}, - {file = "wrapt-1.17.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0b48554952f0f387984da81ccfa73b62e52817a4386d070c75e4db7d43a28c4a"}, - {file = "wrapt-1.17.0-cp313-cp313-win32.whl", hash = "sha256:498fec8da10e3e62edd1e7368f4b24aa362ac0ad931e678332d1b209aec93045"}, - {file = "wrapt-1.17.0-cp313-cp313-win_amd64.whl", hash = 
"sha256:fd136bb85f4568fffca995bd3c8d52080b1e5b225dbf1c2b17b66b4c5fa02838"}, - {file = "wrapt-1.17.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:17fcf043d0b4724858f25b8826c36e08f9fb2e475410bece0ec44a22d533da9b"}, - {file = "wrapt-1.17.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4a557d97f12813dc5e18dad9fa765ae44ddd56a672bb5de4825527c847d6379"}, - {file = "wrapt-1.17.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0229b247b0fc7dee0d36176cbb79dbaf2a9eb7ecc50ec3121f40ef443155fb1d"}, - {file = "wrapt-1.17.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8425cfce27b8b20c9b89d77fb50e368d8306a90bf2b6eef2cdf5cd5083adf83f"}, - {file = "wrapt-1.17.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9c900108df470060174108012de06d45f514aa4ec21a191e7ab42988ff42a86c"}, - {file = "wrapt-1.17.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:4e547b447073fc0dbfcbff15154c1be8823d10dab4ad401bdb1575e3fdedff1b"}, - {file = "wrapt-1.17.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:914f66f3b6fc7b915d46c1cc424bc2441841083de01b90f9e81109c9759e43ab"}, - {file = "wrapt-1.17.0-cp313-cp313t-win32.whl", hash = "sha256:a4192b45dff127c7d69b3bdfb4d3e47b64179a0b9900b6351859f3001397dabf"}, - {file = "wrapt-1.17.0-cp313-cp313t-win_amd64.whl", hash = "sha256:4f643df3d4419ea3f856c5c3f40fec1d65ea2e89ec812c83f7767c8730f9827a"}, - {file = "wrapt-1.17.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:69c40d4655e078ede067a7095544bcec5a963566e17503e75a3a3e0fe2803b13"}, - {file = "wrapt-1.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f495b6754358979379f84534f8dd7a43ff8cff2558dcdea4a148a6e713a758f"}, - {file = "wrapt-1.17.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:baa7ef4e0886a6f482e00d1d5bcd37c201b383f1d314643dfb0367169f94f04c"}, - {file = "wrapt-1.17.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fc931382e56627ec4acb01e09ce66e5c03c384ca52606111cee50d931a342d"}, - {file = "wrapt-1.17.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8f8909cdb9f1b237786c09a810e24ee5e15ef17019f7cecb207ce205b9b5fcce"}, - {file = "wrapt-1.17.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ad47b095f0bdc5585bced35bd088cbfe4177236c7df9984b3cc46b391cc60627"}, - {file = "wrapt-1.17.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:948a9bd0fb2c5120457b07e59c8d7210cbc8703243225dbd78f4dfc13c8d2d1f"}, - {file = "wrapt-1.17.0-cp38-cp38-win32.whl", hash = "sha256:5ae271862b2142f4bc687bdbfcc942e2473a89999a54231aa1c2c676e28f29ea"}, - {file = "wrapt-1.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:f335579a1b485c834849e9075191c9898e0731af45705c2ebf70e0cd5d58beed"}, - {file = "wrapt-1.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d751300b94e35b6016d4b1e7d0e7bbc3b5e1751e2405ef908316c2a9024008a1"}, - {file = "wrapt-1.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7264cbb4a18dc4acfd73b63e4bcfec9c9802614572025bdd44d0721983fc1d9c"}, - {file = "wrapt-1.17.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33539c6f5b96cf0b1105a0ff4cf5db9332e773bb521cc804a90e58dc49b10578"}, - {file = "wrapt-1.17.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:c30970bdee1cad6a8da2044febd824ef6dc4cc0b19e39af3085c763fdec7de33"}, - {file = "wrapt-1.17.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:bc7f729a72b16ee21795a943f85c6244971724819819a41ddbaeb691b2dd85ad"}, - {file = "wrapt-1.17.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:6ff02a91c4fc9b6a94e1c9c20f62ea06a7e375f42fe57587f004d1078ac86ca9"}, - {file = "wrapt-1.17.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2dfb7cff84e72e7bf975b06b4989477873dcf160b2fd89959c629535df53d4e0"}, - {file = "wrapt-1.17.0-cp39-cp39-win32.whl", hash = "sha256:2399408ac33ffd5b200480ee858baa58d77dd30e0dd0cab6a8a9547135f30a88"}, - {file = "wrapt-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:4f763a29ee6a20c529496a20a7bcb16a73de27f5da6a843249c7047daf135977"}, - {file = "wrapt-1.17.0-py3-none-any.whl", hash = "sha256:d2c63b93548eda58abf5188e505ffed0229bf675f7c3090f8e36ad55b8cbc371"}, - {file = "wrapt-1.17.0.tar.gz", hash = "sha256:16187aa2317c731170a88ef35e8937ae0f533c402872c1ee5e6d079fcf320801"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = 
"sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = 
"wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, ] [[package]] @@ -4471,4 +4432,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.1" python-versions = ">=3.10,<3.13" -content-hash = "9325d134cf2c4390c520b9c5e8bae290a0fa05e0c782aa6b1f079d31d9a5c8f5" +content-hash = "711669de9e6d5b81f19286bd41d52f57bc0177ba8ff5f2b477313a5b2d012ae5" diff --git a/autogpt_platform/backend/pyproject.toml b/autogpt_platform/backend/pyproject.toml index f03f9b8ae9..0573c61c15 100644 --- a/autogpt_platform/backend/pyproject.toml +++ b/autogpt_platform/backend/pyproject.toml @@ -39,8 +39,9 @@ python-dotenv = "^1.0.1" redis = "^5.2.0" sentry-sdk = "2.19.2" strenum = "^0.4.9" -supabase = "^2.10.0" +supabase = "2.11.0" tenacity = "^9.0.0" +tweepy = "^4.14.0" uvicorn = { extras = ["standard"], version = "^0.34.0" } websockets = "^13.1" youtube-transcript-api = "^0.6.2" diff --git a/autogpt_platform/frontend/src/components/CustomNode.tsx b/autogpt_platform/frontend/src/components/CustomNode.tsx index 4100932048..82c633b8f6 100644 --- a/autogpt_platform/frontend/src/components/CustomNode.tsx +++ b/autogpt_platform/frontend/src/components/CustomNode.tsx @@ -253,7 +253,13 @@ export function CustomNode({ !isHidden && (isRequired || isAdvancedOpen || isConnected || !isAdvanced) && (
- {isConnectable ? ( + {isConnectable && + !( + "oneOf" in propSchema && + propSchema.oneOf && + "discriminator" in propSchema && + propSchema.discriminator + ) ? ( = { replicate: "Replicate", fal: "FAL", revid: "Rev.ID", + twitter: "Twitter", unreal_speech: "Unreal Speech", exa: "Exa", hubspot: "Hubspot", diff --git a/autogpt_platform/frontend/src/components/node-input-components.tsx b/autogpt_platform/frontend/src/components/node-input-components.tsx index 2d8c52800b..b06c4f0321 100644 --- a/autogpt_platform/frontend/src/components/node-input-components.tsx +++ b/autogpt_platform/frontend/src/components/node-input-components.tsx @@ -17,6 +17,7 @@ import { BlockIOStringSubSchema, BlockIONumberSubSchema, BlockIOBooleanSubSchema, + BlockIOSimpleTypeSubSchema, } from "@/lib/autogpt-server-api/types"; import React, { FC, useCallback, useEffect, useMemo, useState } from "react"; import { Button } from "./ui/button"; @@ -40,6 +41,7 @@ import { LocalValuedInput } from "./ui/input"; import NodeHandle from "./NodeHandle"; import { ConnectionData } from "./CustomNode"; import { CredentialsInput } from "./integrations/credentials-input"; +import { MultiSelect } from "./ui/multiselect-input"; type NodeObjectInputTreeProps = { nodeId: string; @@ -311,6 +313,8 @@ export const NodeGenericInputField: FC<{ ); } + console.log("propSchema", propSchema); + if ("properties" in propSchema) { // Render a multi-select for all-boolean sub-schemas with more than 3 properties if ( @@ -376,12 +380,53 @@ export const NodeGenericInputField: FC<{ } if ("anyOf" in propSchema) { + // Optional oneOf + if ( + "oneOf" in propSchema.anyOf[0] && + propSchema.anyOf[0].oneOf && + "discriminator" in propSchema.anyOf[0] && + propSchema.anyOf[0].discriminator + ) { + return ( + + ); + } + // optional items const types = propSchema.anyOf.map((s) => "type" in s ? 
s.type : undefined, ); if (types.includes("string") && types.includes("null")) { - // optional string + // optional string and datetime + + if ( "format" in propSchema.anyOf[0] && propSchema.anyOf[0].format === "date-time" ) { return ( + ); } + return ( ); } else if (types.includes("object") && types.includes("null")) { + // rendering optional multiselect + if ( Object.values( (propSchema.anyOf[0] as BlockIOObjectSubSchema).properties, ).every( (subSchema) => "type" in subSchema && subSchema.type === "boolean", ) && Object.keys((propSchema.anyOf[0] as BlockIOObjectSubSchema).properties) .length >= 1 ) { const options = Object.keys( (propSchema.anyOf[0] as BlockIOObjectSubSchema).properties, ); const selectedKeys = Object.entries(currentValue || {}) .filter(([_, v]) => v) .map(([k, _]) => k); return ( + { handleInputChange( key, Object.fromEntries( options.map((option) => [option, selection.includes(option)]), ), ); }} + /> ); } + return ( void; handleInputClick: (key: string) => void; className?: string; @@ -637,7 +718,6 @@ const NodeOneOfDiscriminatorField: FC<{ handleInputChange, handleInputClick, className, - displayName, }) => { const discriminator = propSchema.discriminator; @@ -653,7 +733,7 @@ const NodeOneOfDiscriminatorField: FC<{ return { value: variantDiscValue, - schema: variant, + schema: variant as BlockIOSubSchema, }; }) .filter((v: any) => v.value != null); @@ -684,8 +764,24 @@ const NodeOneOfDiscriminatorField: FC<{ (opt: any) => opt.value === chosenType, )?.schema; + function getEntryKey(key: string): string { + // build a composite key for the connection handle (not childKey) + return `${propKey}_#_${key}`; + } + + function isConnected(key: string): boolean { + return connections.some( + (c) => c.targetHandle === getEntryKey(key) && c.target === nodeId, + ); + } + return ( -
+