[Fix]: Plumb provider tokens to runtime (#7247)

Co-authored-by: openhands <openhands@all-hands.dev>
Co-authored-by: Engel Nyst <enyst@users.noreply.github.com>
Rohit Malhotra
2025-03-20 18:43:27 -04:00
committed by GitHub
parent 6f204fd557
commit 41efa100f0
16 changed files with 686 additions and 188 deletions

View File

@@ -15,6 +15,7 @@ from openhands.core.config import (
from openhands.core.logger import openhands_logger as logger
from openhands.events import EventStream
from openhands.events.event import Event
from openhands.integrations.provider import ProviderToken, ProviderType, SecretStore
from openhands.llm.llm import LLM
from openhands.memory.memory import Memory
from openhands.microagent.microagent import BaseMicroAgent
@@ -101,11 +102,23 @@ def initialize_repository_for_runtime(
github_token = (
SecretStr(os.environ.get('GITHUB_TOKEN')) if not github_token else github_token
)
secret_store = (
SecretStore(
provider_tokens={
ProviderType.GITHUB: ProviderToken(token=SecretStr(github_token))
}
)
if github_token
else None
)
provider_tokens = secret_store.provider_tokens if secret_store else None
repo_directory = None
if selected_repository and github_token:
if selected_repository and provider_tokens:
logger.debug(f'Selected repository {selected_repository}.')
repo_directory = runtime.clone_repo(
github_token,
provider_tokens,
selected_repository,
None,
)
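
A hedged sketch of the plumbing this hunk introduces: the single GITHUB_TOKEN is wrapped in a SecretStore, and the resulting provider-token mapping (assumed to be the immutable mapping clone_repo now expects), rather than the raw token, is what reaches the runtime. The runtime object and repository name below are illustrative.

# Sketch only: GITHUB_TOKEN -> SecretStore -> provider_tokens -> clone_repo
import os
from pydantic import SecretStr
from openhands.integrations.provider import ProviderToken, ProviderType, SecretStore

raw = os.environ.get('GITHUB_TOKEN', 'ghp_example')
store = SecretStore(
    provider_tokens={ProviderType.GITHUB: ProviderToken(token=SecretStr(raw))}
)
# repo_directory = runtime.clone_repo(store.provider_tokens, 'owner/repo', None)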

View File

@@ -22,6 +22,7 @@ class GitLabService(GitService):
def __init__(
self,
user_id: str | None = None,
external_auth_id: str | None = None,
external_auth_token: SecretStr | None = None,
token: SecretStr | None = None,
external_token_manager: bool = False,
@@ -46,7 +47,7 @@ class GitLabService(GitService):
def _has_token_expired(self, status_code: int) -> bool:
return status_code == 401
async def get_latest_token(self) -> SecretStr:
async def get_latest_token(self) -> SecretStr | None:
return self.token
async def _fetch_data(

View File

@@ -2,6 +2,7 @@ from __future__ import annotations
from enum import Enum
from types import MappingProxyType
from typing import Any, Coroutine, Literal, overload
from pydantic import (
BaseModel,
@@ -13,6 +14,9 @@ from pydantic import (
)
from pydantic.json import pydantic_encoder
from openhands.events.action.action import Action
from openhands.events.action.commands import CmdRunAction
from openhands.events.stream import EventStream
from openhands.integrations.github.github_service import GithubServiceImpl
from openhands.integrations.gitlab.gitlab_service import GitLabServiceImpl
from openhands.integrations.service_types import (
@@ -130,15 +134,23 @@ class ProviderHandler:
def __init__(
self,
provider_tokens: PROVIDER_TOKEN_TYPE,
external_auth_id: str | None = None,
external_auth_token: SecretStr | None = None,
external_token_manager: bool = False,
):
if not isinstance(provider_tokens, MappingProxyType):
raise TypeError(
f'provider_tokens must be a MappingProxyType, got {type(provider_tokens).__name__}'
)
self.service_class_map: dict[ProviderType, type[GitService]] = {
ProviderType.GITHUB: GithubServiceImpl,
ProviderType.GITLAB: GitLabServiceImpl,
}
# Create immutable copy through SecretStore
self.external_auth_id = external_auth_id
self.external_auth_token = external_auth_token
self.external_token_manager = external_token_manager
self._provider_tokens = provider_tokens
@property
@@ -152,8 +164,10 @@ class ProviderHandler:
service_class = self.service_class_map[provider]
return service_class(
user_id=token.user_id,
external_auth_id=self.external_auth_id,
external_auth_token=self.external_auth_token,
token=token.token,
external_token_manager=self.external_token_manager,
)
async def get_user(self) -> User:
@@ -166,14 +180,12 @@ class ProviderHandler:
continue
raise AuthenticationError('Need valid provider token')
async def get_latest_provider_tokens(self) -> dict[ProviderType, SecretStr]:
"""Get latest token from services"""
tokens = {}
for provider in self.provider_tokens:
service = self._get_service(provider)
tokens[provider] = await service.get_latest_token()
return tokens
async def _get_latest_provider_token(
self, provider: ProviderType
) -> SecretStr | None:
"""Get latest token from service"""
service = self._get_service(provider)
return await service.get_latest_token()
async def get_repositories(
self, page: int, per_page: int, sort: str, installation_id: int | None
@@ -190,3 +202,120 @@ class ProviderHandler:
except Exception:
continue
return all_repos
async def set_event_stream_secrets(
self,
event_stream: EventStream,
env_vars: dict[ProviderType, SecretStr] | None = None,
):
"""
This ensures that the latest provider tokens are masked from the event stream.
It is called when the provider tokens are first initialized in the runtime, or when tokens are re-exported with the latest working ones.
Args:
event_stream: Agent session's event stream
env_vars: Dict of providers and their tokens that require updating
"""
if env_vars:
exposed_env_vars = self.expose_env_vars(env_vars)
else:
exposed_env_vars = await self.get_env_vars(expose_secrets=True)
event_stream.set_secrets(exposed_env_vars)
def expose_env_vars(
self, env_secrets: dict[ProviderType, SecretStr]
) -> dict[str, str]:
"""
Return string values instead of typed values for environment secrets
Called just before exporting secrets to runtime, or setting secrets in the event stream
"""
exposed_envs = {}
for provider, token in env_secrets.items():
env_key = ProviderHandler.get_provider_env_key(provider)
exposed_envs[env_key] = token.get_secret_value()
return exposed_envs
@overload
def get_env_vars(
self,
expose_secrets: Literal[True],
providers: list[ProviderType] | None = ...,
get_latest: bool = False,
) -> Coroutine[Any, Any, dict[str, str]]: ...
@overload
def get_env_vars(
self,
expose_secrets: Literal[False],
providers: list[ProviderType] | None = ...,
get_latest: bool = False,
) -> Coroutine[Any, Any, dict[ProviderType, SecretStr]]: ...
async def get_env_vars(
self,
expose_secrets: bool = False,
providers: list[ProviderType] | None = None,
get_latest: bool = False,
) -> dict[ProviderType, SecretStr] | dict[str, str]:
"""
Retrieves the provider tokens from the ProviderHandler object
This is used when initializing/exporting new provider tokens in the runtime
Args:
expose_secrets: Flag which returns strings instead of secrets
providers: Return provider tokens for the list passed in, otherwise return all available providers
get_latest: Get the latest working token for the providers if True, otherwise get the existing ones
"""
if not self.provider_tokens:
return {}
env_vars: dict[ProviderType, SecretStr] = {}
all_providers = [provider for provider in ProviderType]
provider_list = providers if providers else all_providers
for provider in provider_list:
if provider in self.provider_tokens:
token = (
self.provider_tokens[provider].token
if self.provider_tokens
else SecretStr('')
)
if get_latest:
token = await self._get_latest_provider_token(provider)
if token:
env_vars[provider] = token
if not expose_secrets:
return env_vars
return self.expose_env_vars(env_vars)
@classmethod
def check_cmd_action_for_provider_token_ref(
cls, event: Action
) -> list[ProviderType]:
"""
Detect if an agent run action is using a provider token (e.g. $GITHUB_TOKEN)
Returns a list of providers which are called by the agent
"""
if not isinstance(event, CmdRunAction):
return []
called_providers = []
for provider in ProviderType:
if ProviderHandler.get_provider_env_key(provider) in event.command.lower():
called_providers.append(provider)
return called_providers
@classmethod
def get_provider_env_key(cls, provider: ProviderType) -> str:
"""
Map ProviderType value to the environment variable name in the runtime
"""
return f'{provider.value}_token'.lower()
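
A hedged usage sketch of the helpers added in this hunk; the token value is a placeholder, and the mapping must be a MappingProxyType as enforced in __init__.

# Sketch only: exporting provider tokens as runtime environment variables
import asyncio
from types import MappingProxyType
from pydantic import SecretStr
from openhands.integrations.provider import ProviderHandler, ProviderToken, ProviderType

tokens = MappingProxyType(
    {ProviderType.GITHUB: ProviderToken(token=SecretStr('ghp_example'))}
)
handler = ProviderHandler(provider_tokens=tokens)

typed = asyncio.run(handler.get_env_vars(expose_secrets=False))  # {ProviderType.GITHUB: SecretStr(...)}
exposed = handler.expose_env_vars(typed)                         # {'github_token': 'ghp_example'}
# get_provider_env_key(ProviderType.GITHUB) -> 'github_token' (assuming the enum value is 'github')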

View File

@@ -52,16 +52,17 @@ class GitService(Protocol):
def __init__(
self,
user_id: str | None,
token: SecretStr | None,
external_auth_token: SecretStr | None,
user_id: str | None = None,
token: SecretStr | None = None,
external_auth_id: str | None = None,
external_auth_token: SecretStr | None = None,
external_token_manager: bool = False,
) -> None:
"""Initialize the service with authentication details"""
...
async def get_latest_token(self) -> SecretStr:
"""Get latest working token of the users"""
async def get_latest_token(self) -> SecretStr | None:
"""Get latest working token of the user"""
...
async def get_user(self) -> User:

View File

@@ -9,7 +9,8 @@ import string
import tempfile
from abc import abstractmethod
from pathlib import Path
from typing import Callable
from types import MappingProxyType
from typing import Callable, cast
from zipfile import ZipFile
from pydantic import SecretStr
@@ -41,7 +42,11 @@ from openhands.events.observation import (
UserRejectObservation,
)
from openhands.events.serialization.action import ACTION_TYPE_TO_CLASS
from openhands.integrations.github.github_service import GithubServiceImpl
from openhands.integrations.provider import (
PROVIDER_TOKEN_TYPE,
ProviderHandler,
ProviderType,
)
from openhands.microagent import (
BaseMicroAgent,
load_microagents_from_dir,
@@ -52,7 +57,11 @@ from openhands.runtime.plugins import (
VSCodeRequirement,
)
from openhands.runtime.utils.edit import FileEditRuntimeMixin
from openhands.utils.async_utils import call_sync_from_async
from openhands.utils.async_utils import (
GENERAL_TIMEOUT,
call_async_from_sync,
call_sync_from_async,
)
STATUS_MESSAGES = {
'STATUS$STARTING_RUNTIME': 'Starting runtime...',
@@ -98,6 +107,7 @@ class Runtime(FileEditRuntimeMixin):
attach_to_existing: bool = False,
headless_mode: bool = False,
user_id: str | None = None,
git_provider_tokens: PROVIDER_TOKEN_TYPE | None = None,
):
self.sid = sid
self.event_stream = event_stream
@@ -121,6 +131,17 @@ class Runtime(FileEditRuntimeMixin):
if env_vars is not None:
self.initial_env_vars.update(env_vars)
self.provider_handler = ProviderHandler(
provider_tokens=git_provider_tokens
or cast(PROVIDER_TOKEN_TYPE, MappingProxyType({})),
external_auth_id=user_id,
external_token_manager=True,
)
raw_env_vars: dict[str, str] = call_async_from_sync(
self.provider_handler.get_env_vars, GENERAL_TIMEOUT, True, None, False
)
self.initial_env_vars.update(raw_env_vars)
self._vscode_enabled = any(
isinstance(plugin, VSCodeRequirement) for plugin in self.plugins
)
@@ -171,6 +192,8 @@ class Runtime(FileEditRuntimeMixin):
# ====================================================================
def add_env_vars(self, env_vars: dict[str, str]) -> None:
env_vars = {key.upper(): value for key, value in env_vars.items()}
# Add env vars to the IPython shell (if Jupyter is used)
if any(isinstance(plugin, JupyterRequirement) for plugin in self.plugins):
code = 'import os\n'
@@ -216,56 +239,62 @@ class Runtime(FileEditRuntimeMixin):
if isinstance(event, Action):
asyncio.get_event_loop().run_until_complete(self._handle_action(event))
async def _export_latest_git_provider_tokens(self, event: Action) -> None:
"""
Refresh runtime provider tokens when the agent attempts to run an action with a provider token
"""
if not self.user_id:
return
providers_called = ProviderHandler.check_cmd_action_for_provider_token_ref(
event
)
if not providers_called:
return
logger.info(f'Fetching latest github token for runtime: {self.sid}')
env_vars = await self.provider_handler.get_env_vars(
providers=providers_called, expose_secrets=False, get_latest=True
)
# This statement is to debug expired github tokens, and will be removed later
if ProviderType.GITHUB not in env_vars:
logger.error(f'Failed to refresh github token for runtime: {self.sid}')
return
if len(env_vars) == 0:
return
raw_token = env_vars[ProviderType.GITHUB].get_secret_value()
if not self.prev_token:
logger.info(
f'Setting github token in runtime: {self.sid}\nToken value: {raw_token[0:5]}; length: {len(raw_token)}'
)
elif self.prev_token.get_secret_value() != raw_token:
logger.info(
f'Setting new github token in runtime {self.sid}\nToken value: {raw_token[0:5]}; length: {len(raw_token)}'
)
self.prev_token = SecretStr(raw_token)
try:
await self.provider_handler.set_event_stream_secrets(
self.event_stream, env_vars=env_vars
)
self.add_env_vars(self.provider_handler.expose_env_vars(env_vars))
except Exception as e:
logger.warning(
f'Failed to export latest github token to runtime: {self.sid}, {e}'
)
async def _handle_action(self, event: Action) -> None:
if event.timeout is None:
# We don't block the command if this is a default timeout action
event.set_hard_timeout(self.config.sandbox.timeout, blocking=False)
assert event.timeout is not None
try:
if isinstance(event, CmdRunAction):
if self.user_id and 'GITHUB_TOKEN' in event.command:
gh_client = GithubServiceImpl(
external_auth_id=self.user_id, external_token_manager=True
)
logger.info(f'Fetching latest github token for runtime: {self.sid}')
token = await gh_client.get_latest_token()
if not token:
logger.error(
f'Failed to refresh github token for runtime: {self.sid}'
)
if token:
raw_token = token.get_secret_value()
if not self.prev_token:
logger.info(
f'Setting github token in runtime: {self.sid}\nToken value: {raw_token[0:5]}; length: {len(raw_token)}'
)
elif self.prev_token.get_secret_value() != raw_token:
logger.info(
f'Setting new github token in runtime {self.sid}\nToken value: {raw_token[0:5]}; length: {len(raw_token)}'
)
self.prev_token = token
env_vars = {
'GITHUB_TOKEN': raw_token,
}
try:
self.add_env_vars(env_vars)
except Exception as e:
logger.warning(
f'Failed export latest github token to runtime: {self.sid}, {e}'
)
self.event_stream.update_secrets(
{
'github_token': raw_token,
}
)
await self._export_latest_git_provider_tokens(event)
observation: Observation = await call_sync_from_async(
self.run_action, event
)
@@ -293,14 +322,20 @@ class Runtime(FileEditRuntimeMixin):
def clone_repo(
self,
github_token: SecretStr,
git_provider_tokens: PROVIDER_TOKEN_TYPE,
selected_repository: str,
selected_branch: str | None,
) -> str:
if not github_token or not selected_repository:
if (
ProviderType.GITHUB not in git_provider_tokens
or not git_provider_tokens[ProviderType.GITHUB].token
or not selected_repository
):
raise ValueError(
'github_token and selected_repository must be provided to clone a repository'
)
github_token: SecretStr = git_provider_tokens[ProviderType.GITHUB].token
url = f'https://{github_token.get_secret_value()}@github.com/{selected_repository}.git'
dir_name = selected_repository.split('/')[-1]
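
For context, a hedged sketch of the refresh trigger wired up above: when an agent command references a provider env var, the runtime re-exports the freshest token before running it. The CmdRunAction construction below is assumed from its command field.

# Sketch only: detecting a provider-token reference in an agent command
from openhands.events.action.commands import CmdRunAction
from openhands.integrations.provider import ProviderHandler

action = CmdRunAction(command='git push https://$GITHUB_TOKEN@github.com/owner/repo.git')
providers = ProviderHandler.check_cmd_action_for_provider_token_ref(action)
# providers should contain ProviderType.GITHUB; the runtime then calls
# get_env_vars(providers=providers, expose_secrets=False, get_latest=True),
# masks the value via set_event_stream_secrets(), and re-exports it with add_env_vars().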

View File

@@ -35,6 +35,7 @@ from openhands.events.observation import (
)
from openhands.events.serialization import event_to_dict, observation_from_dict
from openhands.events.serialization.action import ACTION_TYPE_TO_CLASS
from openhands.integrations.provider import PROVIDER_TOKEN_TYPE
from openhands.runtime.base import Runtime
from openhands.runtime.plugins import PluginRequirement
from openhands.runtime.utils.request import send_request
@@ -59,6 +60,7 @@ class ActionExecutionClient(Runtime):
attach_to_existing: bool = False,
headless_mode: bool = True,
user_id: str | None = None,
git_provider_tokens: PROVIDER_TOKEN_TYPE | None = None
):
self.session = HttpSession()
self.action_semaphore = threading.Semaphore(1) # Ensure one action at a time
@@ -75,6 +77,7 @@ class ActionExecutionClient(Runtime):
attach_to_existing,
headless_mode,
user_id,
git_provider_tokens,
)
@abstractmethod

View File

@@ -16,6 +16,7 @@ from openhands.core.exceptions import (
)
from openhands.core.logger import openhands_logger as logger
from openhands.events import EventStream
from openhands.integrations.provider import PROVIDER_TOKEN_TYPE
from openhands.runtime.builder.remote import RemoteRuntimeBuilder
from openhands.runtime.impl.action_execution.action_execution_client import (
ActionExecutionClient,
@@ -47,6 +48,7 @@ class RemoteRuntime(ActionExecutionClient):
attach_to_existing: bool = False,
headless_mode: bool = True,
user_id: str | None = None,
git_provider_tokens: PROVIDER_TOKEN_TYPE | None = None,
):
super().__init__(
config,
@@ -58,6 +60,7 @@ class RemoteRuntime(ActionExecutionClient):
attach_to_existing,
headless_mode,
user_id,
git_provider_tokens,
)
if self.config.sandbox.api_key is None:
raise ValueError(

View File

@@ -28,7 +28,6 @@ def get_github_token(request: Request) -> SecretStr | None:
def get_github_user_id(request: Request) -> str | None:
provider_tokens = get_provider_tokens(request)
if provider_tokens and ProviderType.GITHUB in provider_tokens:
return provider_tokens[ProviderType.GITHUB].user_id

View File

@@ -3,15 +3,15 @@ from datetime import datetime, timezone
from fastapi import APIRouter, Body, Request, status
from fastapi.responses import JSONResponse
from pydantic import BaseModel, SecretStr
from pydantic import BaseModel
from openhands.core.logger import openhands_logger as logger
from openhands.events.action.message import MessageAction
from openhands.integrations.github.github_service import GithubServiceImpl
from openhands.integrations.provider import ProviderType
from openhands.integrations.provider import (
PROVIDER_TOKEN_TYPE,
)
from openhands.runtime import get_runtime_cls
from openhands.server.auth import (
get_access_token,
get_github_user_id,
get_provider_tokens,
get_user_id,
@@ -44,7 +44,7 @@ class InitSessionRequest(BaseModel):
async def _create_new_conversation(
user_id: str | None,
token: SecretStr | None,
git_provider_tokens: PROVIDER_TOKEN_TYPE | None,
selected_repository: str | None,
selected_branch: str | None,
initial_user_msg: str | None,
@@ -78,7 +78,7 @@ async def _create_new_conversation(
logger.warn('Settings not present, not starting conversation')
raise MissingSettingsError('Settings not found')
session_init_args['provider_token'] = token
session_init_args['git_provider_tokens'] = git_provider_tokens
session_init_args['selected_repository'] = selected_repository
session_init_args['selected_branch'] = selected_branch
conversation_init_data = ConversationInitData(**session_init_args)
@@ -146,19 +146,7 @@ async def new_conversation(request: Request, data: InitSessionRequest):
using the returned conversation ID.
"""
logger.info('Initializing new conversation')
user_id = None
github_token = None
provider_tokens = get_provider_tokens(request)
if provider_tokens and ProviderType.GITHUB in provider_tokens:
token = provider_tokens[ProviderType.GITHUB]
user_id = token.user_id
gh_client = GithubServiceImpl(
user_id=user_id,
external_auth_token=get_access_token(request),
token=token.token,
)
github_token = await gh_client.get_latest_token()
selected_repository = data.selected_repository
selected_branch = data.selected_branch
initial_user_msg = data.initial_user_msg
@@ -168,7 +156,7 @@ async def new_conversation(request: Request, data: InitSessionRequest):
# Create conversation with initial message
conversation_id = await _create_new_conversation(
get_user_id(request),
github_token,
provider_tokens,
selected_repository,
selected_branch,
initial_user_msg,

View File

@@ -1,9 +1,8 @@
import asyncio
import time
from logging import LoggerAdapter
from typing import Callable
from pydantic import SecretStr
from types import MappingProxyType
from typing import Callable, cast
from openhands.controller import AgentController
from openhands.controller.agent import Agent
@@ -15,6 +14,7 @@ from openhands.core.schema.agent import AgentState
from openhands.events.action import ChangeAgentStateAction, MessageAction
from openhands.events.event import EventSource
from openhands.events.stream import EventStream
from openhands.integrations.provider import PROVIDER_TOKEN_TYPE, ProviderHandler
from openhands.memory.memory import Memory
from openhands.microagent.microagent import BaseMicroAgent
from openhands.runtime import get_runtime_cls
@@ -78,10 +78,10 @@ class AgentSession:
config: AppConfig,
agent: Agent,
max_iterations: int,
git_provider_tokens: PROVIDER_TOKEN_TYPE | None = None,
max_budget_per_task: float | None = None,
agent_to_llm_config: dict[str, LLMConfig] | None = None,
agent_configs: dict[str, AgentConfig] | None = None,
github_token: SecretStr | None = None,
selected_repository: str | None = None,
selected_branch: str | None = None,
initial_message: MessageAction | None = None,
@@ -115,7 +115,7 @@ class AgentSession:
runtime_name=runtime_name,
config=config,
agent=agent,
github_token=github_token,
git_provider_tokens=git_provider_tokens,
selected_repository=selected_repository,
selected_branch=selected_branch,
)
@@ -137,12 +137,12 @@ class AgentSession:
repo_directory=repo_directory,
)
if github_token:
self.event_stream.set_secrets(
{
'github_token': github_token.get_secret_value(),
}
if git_provider_tokens:
provider_handler = ProviderHandler(provider_tokens=git_provider_tokens)
await provider_handler.set_event_stream_secrets(
self.event_stream
)
if not self._closed:
if initial_message:
self.event_stream.add_event(initial_message, EventSource.USER)
@@ -212,7 +212,7 @@ class AgentSession:
runtime_name: str,
config: AppConfig,
agent: Agent,
github_token: SecretStr | None = None,
git_provider_tokens: PROVIDER_TOKEN_TYPE | None = None,
selected_repository: str | None = None,
selected_branch: str | None = None,
) -> bool:
@@ -232,17 +232,6 @@ class AgentSession:
self.logger.debug(f'Initializing runtime `{runtime_name}` now...')
runtime_cls = get_runtime_cls(runtime_name)
env_vars = (
{
'GITHUB_TOKEN': github_token.get_secret_value(),
}
if github_token
else None
)
kwargs = {}
if runtime_cls == RemoteRuntime:
kwargs['user_id'] = self.user_id
self.runtime = runtime_cls(
config=config,
@@ -252,8 +241,8 @@ class AgentSession:
status_callback=self._status_callback,
headless_mode=False,
attach_to_existing=False,
env_vars=env_vars,
**kwargs,
git_provider_tokens=git_provider_tokens,
user_id=self.user_id,
)
# FIXME: this sleep is a terrible hack.
@@ -271,10 +260,10 @@ class AgentSession:
)
return False
if selected_repository:
if selected_repository and git_provider_tokens:
await call_sync_from_async(
self.runtime.clone_repo,
github_token,
git_provider_tokens,
selected_repository,
selected_branch,
)
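
A hedged sketch of the session-side change: the single github_token secret is replaced by a ProviderHandler that masks every configured provider in the event stream. The event stream object is assumed to come from the AgentSession.

# Sketch only: how AgentSession now seeds event-stream secrets
from types import MappingProxyType
from pydantic import SecretStr
from openhands.integrations.provider import ProviderHandler, ProviderToken, ProviderType

git_provider_tokens = MappingProxyType(
    {ProviderType.GITHUB: ProviderToken(token=SecretStr('ghp_example'))}
)
provider_handler = ProviderHandler(provider_tokens=git_provider_tokens)
# inside the async start path:
#     await provider_handler.set_event_stream_secrets(self.event_stream)
# which masks 'github_token' (and any other configured provider keys) in the stream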

View File

@@ -1,5 +1,7 @@
from pydantic import Field, SecretStr
from types import MappingProxyType
from pydantic import Field
from openhands.integrations.provider import PROVIDER_TOKEN_TYPE
from openhands.server.settings import Settings
@@ -8,6 +10,10 @@ class ConversationInitData(Settings):
Session initialization data for the web environment - a deep copy of the global config is made and then overridden with this data.
"""
provider_token: SecretStr | None = Field(default=None)
git_provider_tokens: PROVIDER_TOKEN_TYPE | None = Field(default=None, frozen=True)
selected_repository: str | None = Field(default=None)
selected_branch: str | None = Field(default=None)
model_config = {
'arbitrary_types_allowed': True,
}
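
A hedged sketch of how the new frozen field is populated; the construction mirrors the session_init_args usage in the conversation route above, and everything beyond those names is illustrative.

# Sketch only: passing a provider-token mapping through ConversationInitData
from types import MappingProxyType
from pydantic import SecretStr
from openhands.integrations.provider import ProviderToken, ProviderType

session_init_args = {
    'git_provider_tokens': MappingProxyType(
        {ProviderType.GITHUB: ProviderToken(token=SecretStr('ghp_example'))}
    ),
    'selected_repository': 'owner/repo',
    'selected_branch': None,
}
# conversation_init_data = ConversationInitData(**session_init_args)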

View File

@@ -23,6 +23,7 @@ from openhands.events.observation import (
from openhands.events.observation.error import ErrorObservation
from openhands.events.serialization import event_from_dict, event_to_dict
from openhands.events.stream import EventStreamSubscriber
from openhands.integrations.provider import PROVIDER_TOKEN_TYPE
from openhands.llm.llm import LLM
from openhands.server.session.agent_session import AgentSession
from openhands.server.session.conversation_init_data import ConversationInitData
@@ -133,11 +134,11 @@ class Session:
agent = Agent.get_cls(agent_cls)(llm, agent_config)
provider_token = None
git_provider_tokens = None
selected_repository = None
selected_branch = None
if isinstance(settings, ConversationInitData):
provider_token = settings.provider_token
git_provider_tokens = settings.git_provider_tokens
selected_repository = settings.selected_repository
selected_branch = settings.selected_branch
@@ -150,7 +151,7 @@ class Session:
max_budget_per_task=self.config.max_budget_per_task,
agent_to_llm_config=self.config.get_agent_to_llm_config_map(),
agent_configs=self.config.get_agent_configs(),
github_token=provider_token,
git_provider_tokens=git_provider_tokens,
selected_repository=selected_repository,
selected_branch=selected_branch,
initial_message=initial_message,

poetry.lock (generated) · 142
View File

@@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 2.0.1 and should not be changed by hand.
# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand.
[[package]]
name = "aiohappyeyeballs"
@@ -113,7 +113,7 @@ propcache = ">=0.2.0"
yarl = ">=1.17.0,<2.0"
[package.extras]
speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"]
speedups = ["Brotli ; platform_python_implementation == \"CPython\"", "aiodns (>=3.2.0) ; sys_platform == \"linux\" or sys_platform == \"darwin\"", "brotlicffi ; platform_python_implementation != \"CPython\""]
[[package]]
name = "aiolimiter"
@@ -224,7 +224,7 @@ typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""}
[package.extras]
doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"]
test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"]
test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\" and python_version < \"3.14\""]
trio = ["trio (>=0.26.1)"]
[[package]]
@@ -360,12 +360,12 @@ files = [
]
[package.extras]
benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"]
tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"]
tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""]
[[package]]
name = "babel"
@@ -380,7 +380,7 @@ files = [
]
[package.extras]
dev = ["backports.zoneinfo", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata"]
dev = ["backports.zoneinfo ; python_version < \"3.9\"", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata ; sys_platform == \"win32\""]
[[package]]
name = "bashlex"
@@ -408,9 +408,9 @@ files = [
[package.extras]
all = ["typing-extensions (>=3.10.0.0)"]
dev = ["autoapi (>=0.9.0)", "coverage (>=5.5)", "mypy (>=0.800)", "numpy", "pytest (>=4.0.0)", "sphinx", "sphinx (>=4.1.0)", "tox (>=3.20.1)", "typing-extensions"]
dev = ["autoapi (>=0.9.0)", "coverage (>=5.5)", "mypy (>=0.800) ; platform_python_implementation != \"PyPy\"", "numpy ; sys_platform != \"darwin\" and platform_python_implementation != \"PyPy\"", "pytest (>=4.0.0)", "sphinx", "sphinx (>=4.1.0)", "tox (>=3.20.1)", "typing-extensions ; python_version < \"3.9.0\""]
doc-rtd = ["furo (==2022.6.21)", "sphinx (==4.1.0)"]
test-tox = ["mypy (>=0.800)", "numpy", "pytest (>=4.0.0)", "sphinx", "typing-extensions"]
test-tox = ["mypy (>=0.800) ; platform_python_implementation != \"PyPy\"", "numpy ; sys_platform != \"darwin\" and platform_python_implementation != \"PyPy\"", "pytest (>=4.0.0)", "sphinx", "typing-extensions ; python_version < \"3.9.0\""]
test-tox-coverage = ["coverage (>=5.5)"]
[[package]]
@@ -699,7 +699,7 @@ pyproject_hooks = "*"
[package.extras]
docs = ["furo (>=2023.08.17)", "sphinx (>=7.0,<8.0)", "sphinx-argparse-cli (>=1.5)", "sphinx-autodoc-typehints (>=1.10)", "sphinx-issues (>=3.0.0)"]
test = ["build[uv,virtualenv]", "filelock (>=3)", "pytest (>=6.2.4)", "pytest-cov (>=2.12)", "pytest-mock (>=2)", "pytest-rerunfailures (>=9.1)", "pytest-xdist (>=1.34)", "setuptools (>=42.0.0)", "setuptools (>=56.0.0)", "setuptools (>=56.0.0)", "setuptools (>=67.8.0)", "wheel (>=0.36.0)"]
test = ["build[uv,virtualenv]", "filelock (>=3)", "pytest (>=6.2.4)", "pytest-cov (>=2.12)", "pytest-mock (>=2)", "pytest-rerunfailures (>=9.1)", "pytest-xdist (>=1.34)", "setuptools (>=42.0.0) ; python_version < \"3.10\"", "setuptools (>=56.0.0) ; python_version == \"3.10\"", "setuptools (>=56.0.0) ; python_version == \"3.11\"", "setuptools (>=67.8.0) ; python_version >= \"3.12\"", "wheel (>=0.36.0)"]
typing = ["build[uv]", "importlib-metadata (>=5.1)", "mypy (>=1.9.0,<1.10.0)", "tomli", "typing-extensions (>=3.7.4.3)"]
uv = ["uv (>=0.1.18)"]
virtualenv = ["virtualenv (>=20.0.35)"]
@@ -1168,7 +1168,7 @@ files = [
]
[package.extras]
toml = ["tomli"]
toml = ["tomli ; python_full_version <= \"3.11.0a6\""]
[[package]]
name = "cryptography"
@@ -1215,10 +1215,10 @@ files = [
cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""}
[package.extras]
docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0)"]
docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0) ; python_version >= \"3.8\""]
docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"]
nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2)"]
pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"]
nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2) ; python_version >= \"3.8\""]
pep8test = ["check-sdist ; python_version >= \"3.8\"", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"]
sdist = ["build (>=1.0.0)"]
ssh = ["bcrypt (>=3.1.5)"]
test = ["certifi (>=2024)", "cryptography-vectors (==44.0.1)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
@@ -1285,17 +1285,17 @@ tqdm = ">=4.66.3"
xxhash = "*"
[package.extras]
audio = ["librosa", "soundfile (>=0.12.1)", "soxr (>=0.4.0)"]
audio = ["librosa", "soundfile (>=0.12.1)", "soxr (>=0.4.0) ; python_version >= \"3.9\""]
benchmarks = ["tensorflow (==2.12.0)", "torch (==2.0.1)", "transformers (==4.30.1)"]
dev = ["Pillow (>=9.4.0)", "absl-py", "decorator", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.8.0.post1)", "jax (>=0.3.14)", "jaxlib (>=0.3.14)", "joblib (<1.3.0)", "joblibspark", "librosa", "lz4", "moto[server]", "polars[timezone] (>=0.20.0)", "protobuf (<4.0.0)", "py7zr", "pyspark (>=3.4)", "pytest", "pytest-datadir", "pytest-xdist", "rarfile (>=4.0)", "ruff (>=0.3.0)", "s3fs", "s3fs (>=2021.11.1)", "soundfile (>=0.12.1)", "soxr (>=0.4.0)", "sqlalchemy", "tensorflow (>=2.16.0)", "tensorflow (>=2.6.0)", "tensorflow (>=2.6.0)", "tiktoken", "torch", "torch (>=2.0.0)", "torchdata", "transformers", "transformers (>=4.42.0)", "zstandard"]
dev = ["Pillow (>=9.4.0)", "absl-py", "decorator", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.8.0.post1)", "jax (>=0.3.14) ; sys_platform != \"win32\"", "jaxlib (>=0.3.14) ; sys_platform != \"win32\"", "joblib (<1.3.0)", "joblibspark", "librosa", "lz4", "moto[server]", "polars[timezone] (>=0.20.0)", "protobuf (<4.0.0)", "py7zr", "pyspark (>=3.4)", "pytest", "pytest-datadir", "pytest-xdist", "rarfile (>=4.0)", "ruff (>=0.3.0)", "s3fs", "s3fs (>=2021.11.1)", "soundfile (>=0.12.1)", "soxr (>=0.4.0) ; python_version >= \"3.9\"", "sqlalchemy", "tensorflow (>=2.16.0) ; python_version >= \"3.10\"", "tensorflow (>=2.6.0)", "tensorflow (>=2.6.0) ; python_version < \"3.10\"", "tiktoken", "torch", "torch (>=2.0.0)", "torchdata", "transformers", "transformers (>=4.42.0)", "zstandard"]
docs = ["s3fs", "tensorflow (>=2.6.0)", "torch", "transformers"]
jax = ["jax (>=0.3.14)", "jaxlib (>=0.3.14)"]
quality = ["ruff (>=0.3.0)"]
s3 = ["s3fs"]
tensorflow = ["tensorflow (>=2.6.0)"]
tensorflow-gpu = ["tensorflow (>=2.6.0)"]
tests = ["Pillow (>=9.4.0)", "absl-py", "decorator", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.8.0.post1)", "jax (>=0.3.14)", "jaxlib (>=0.3.14)", "joblib (<1.3.0)", "joblibspark", "librosa", "lz4", "moto[server]", "polars[timezone] (>=0.20.0)", "protobuf (<4.0.0)", "py7zr", "pyspark (>=3.4)", "pytest", "pytest-datadir", "pytest-xdist", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "soundfile (>=0.12.1)", "soxr (>=0.4.0)", "sqlalchemy", "tensorflow (>=2.16.0)", "tensorflow (>=2.6.0)", "tiktoken", "torch (>=2.0.0)", "torchdata", "transformers (>=4.42.0)", "zstandard"]
tests-numpy2 = ["Pillow (>=9.4.0)", "absl-py", "decorator", "elasticsearch (<8.0.0)", "jax (>=0.3.14)", "jaxlib (>=0.3.14)", "joblib (<1.3.0)", "joblibspark", "lz4", "moto[server]", "polars[timezone] (>=0.20.0)", "protobuf (<4.0.0)", "py7zr", "pyspark (>=3.4)", "pytest", "pytest-datadir", "pytest-xdist", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "soundfile (>=0.12.1)", "soxr (>=0.4.0)", "sqlalchemy", "tiktoken", "torch (>=2.0.0)", "torchdata", "transformers (>=4.42.0)", "zstandard"]
tests = ["Pillow (>=9.4.0)", "absl-py", "decorator", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.8.0.post1)", "jax (>=0.3.14) ; sys_platform != \"win32\"", "jaxlib (>=0.3.14) ; sys_platform != \"win32\"", "joblib (<1.3.0)", "joblibspark", "librosa", "lz4", "moto[server]", "polars[timezone] (>=0.20.0)", "protobuf (<4.0.0)", "py7zr", "pyspark (>=3.4)", "pytest", "pytest-datadir", "pytest-xdist", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "soundfile (>=0.12.1)", "soxr (>=0.4.0) ; python_version >= \"3.9\"", "sqlalchemy", "tensorflow (>=2.16.0) ; python_version >= \"3.10\"", "tensorflow (>=2.6.0) ; python_version < \"3.10\"", "tiktoken", "torch (>=2.0.0)", "torchdata", "transformers (>=4.42.0)", "zstandard"]
tests-numpy2 = ["Pillow (>=9.4.0)", "absl-py", "decorator", "elasticsearch (<8.0.0)", "jax (>=0.3.14) ; sys_platform != \"win32\"", "jaxlib (>=0.3.14) ; sys_platform != \"win32\"", "joblib (<1.3.0)", "joblibspark", "lz4", "moto[server]", "polars[timezone] (>=0.20.0)", "protobuf (<4.0.0)", "py7zr", "pyspark (>=3.4)", "pytest", "pytest-datadir", "pytest-xdist", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "soundfile (>=0.12.1)", "soxr (>=0.4.0) ; python_version >= \"3.9\"", "sqlalchemy", "tiktoken", "torch (>=2.0.0)", "torchdata", "transformers (>=4.42.0)", "zstandard"]
torch = ["torch"]
vision = ["Pillow (>=9.4.0)"]
@@ -1417,7 +1417,7 @@ files = [
wrapt = ">=1.10,<2"
[package.extras]
dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "setuptools", "tox"]
dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "setuptools ; python_version >= \"3.12\"", "tox"]
[[package]]
name = "dill"
@@ -1627,7 +1627,7 @@ files = [
]
[package.extras]
tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"]
tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich ; python_version >= \"3.11\""]
[[package]]
name = "faker"
@@ -1725,7 +1725,7 @@ files = [
[package.extras]
docs = ["furo (>=2024.8.6)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"]
testing = ["covdefaults (>=2.3)", "coverage (>=7.6.10)", "diff-cover (>=9.2.1)", "pytest (>=8.3.4)", "pytest-asyncio (>=0.25.2)", "pytest-cov (>=6)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.28.1)"]
typing = ["typing-extensions (>=4.12.2)"]
typing = ["typing-extensions (>=4.12.2) ; python_version < \"3.11\""]
[[package]]
name = "flake8"
@@ -1828,18 +1828,18 @@ files = [
]
[package.extras]
all = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "fs (>=2.2.0,<3)", "lxml (>=4.0)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres", "pycairo", "scipy", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0)", "xattr", "zopfli (>=0.1.4)"]
all = ["brotli (>=1.0.1) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\"", "fs (>=2.2.0,<3)", "lxml (>=4.0)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres ; platform_python_implementation == \"PyPy\"", "pycairo", "scipy ; platform_python_implementation != \"PyPy\"", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0) ; python_version <= \"3.12\"", "xattr ; sys_platform == \"darwin\"", "zopfli (>=0.1.4)"]
graphite = ["lz4 (>=1.7.4.2)"]
interpolatable = ["munkres", "pycairo", "scipy"]
interpolatable = ["munkres ; platform_python_implementation == \"PyPy\"", "pycairo", "scipy ; platform_python_implementation != \"PyPy\""]
lxml = ["lxml (>=4.0)"]
pathops = ["skia-pathops (>=0.5.0)"]
plot = ["matplotlib"]
repacker = ["uharfbuzz (>=0.23.0)"]
symfont = ["sympy"]
type1 = ["xattr"]
type1 = ["xattr ; sys_platform == \"darwin\""]
ufo = ["fs (>=2.2.0,<3)"]
unicode = ["unicodedata2 (>=15.1.0)"]
woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"]
unicode = ["unicodedata2 (>=15.1.0) ; python_version <= \"3.12\""]
woff = ["brotli (>=1.0.1) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\"", "zopfli (>=0.1.4)"]
[[package]]
name = "fqdn"
@@ -2103,11 +2103,11 @@ greenlet = {version = ">=3.0rc3", markers = "platform_python_implementation == \
"zope.interface" = "*"
[package.extras]
dnspython = ["dnspython (>=1.16.0,<2.0)", "idna"]
dnspython = ["dnspython (>=1.16.0,<2.0) ; python_version < \"3.10\"", "idna ; python_version < \"3.10\""]
docs = ["furo", "repoze.sphinx.autointerface", "sphinx", "sphinxcontrib-programoutput", "zope.schema"]
monitor = ["psutil (>=5.7.0)"]
recommended = ["cffi (>=1.12.2)", "dnspython (>=1.16.0,<2.0)", "idna", "psutil (>=5.7.0)"]
test = ["cffi (>=1.12.2)", "coverage (>=5.0)", "dnspython (>=1.16.0,<2.0)", "idna", "objgraph", "psutil (>=5.7.0)", "requests"]
monitor = ["psutil (>=5.7.0) ; sys_platform != \"win32\" or platform_python_implementation == \"CPython\""]
recommended = ["cffi (>=1.12.2) ; platform_python_implementation == \"CPython\"", "dnspython (>=1.16.0,<2.0) ; python_version < \"3.10\"", "idna ; python_version < \"3.10\"", "psutil (>=5.7.0) ; sys_platform != \"win32\" or platform_python_implementation == \"CPython\""]
test = ["cffi (>=1.12.2) ; platform_python_implementation == \"CPython\"", "coverage (>=5.0) ; sys_platform != \"win32\"", "dnspython (>=1.16.0,<2.0) ; python_version < \"3.10\"", "idna ; python_version < \"3.10\"", "objgraph", "psutil (>=5.7.0) ; sys_platform != \"win32\" or platform_python_implementation == \"CPython\"", "requests"]
[[package]]
name = "ghapi"
@@ -2160,7 +2160,7 @@ gitdb = ">=4.0.1,<5"
[package.extras]
doc = ["sphinx (>=7.1.2,<7.2)", "sphinx-autodoc-typehints", "sphinx_rtd_theme"]
test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "typing-extensions"]
test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock ; python_version < \"3.8\"", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "typing-extensions ; python_version < \"3.11\""]
[[package]]
name = "google-ai-generativelanguage"
@@ -2179,7 +2179,7 @@ google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extr
google-auth = ">=2.14.1,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0dev"
proto-plus = [
{version = ">=1.25.0,<2.0.0dev", markers = "python_version >= \"3.13\""},
{version = ">=1.22.3,<2.0.0dev", markers = "python_version < \"3.13\""},
{version = ">=1.22.3,<2.0.0dev"},
]
protobuf = ">=3.20.2,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0dev"
@@ -2202,14 +2202,14 @@ grpcio = {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_versi
grpcio-status = {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}
proto-plus = [
{version = ">=1.25.0,<2.0.0dev", markers = "python_version >= \"3.13\""},
{version = ">=1.22.3,<2.0.0dev", markers = "python_version < \"3.13\""},
{version = ">=1.22.3,<2.0.0dev"},
]
protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0"
requests = ">=2.18.0,<3.0.0.dev0"
[package.extras]
async-rest = ["google-auth[aiohttp] (>=2.35.0,<3.0.dev0)"]
grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0)"]
grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev) ; python_version >= \"3.11\"", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0) ; python_version >= \"3.11\""]
grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"]
grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"]
@@ -2324,10 +2324,10 @@ ag2-testing = ["absl-py", "ag2[gemini]", "cloudpickle (>=3.0,<4.0)", "google-clo
agent-engines = ["cloudpickle (>=3.0,<4.0)", "google-cloud-logging (<4)", "google-cloud-trace (<2)", "opentelemetry-exporter-gcp-trace (<2)", "opentelemetry-sdk (<2)", "packaging (>=24.0)", "pydantic (>=2.10,<3)", "typing-extensions"]
autologging = ["mlflow (>=1.27.0,<=2.16.0)"]
cloud-profiler = ["tensorboard-plugin-profile (>=2.4.0,<2.18.0)", "tensorflow (>=2.4.0,<3.0.0dev)", "werkzeug (>=2.0.0,<2.1.0dev)"]
datasets = ["pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)"]
datasets = ["pyarrow (>=10.0.1) ; python_version == \"3.11\"", "pyarrow (>=14.0.0) ; python_version >= \"3.12\"", "pyarrow (>=3.0.0,<8.0dev) ; python_version < \"3.11\""]
endpoint = ["requests (>=2.28.1)"]
evaluation = ["pandas (>=1.0.0)", "scikit-learn", "scikit-learn (<1.6.0)", "tqdm (>=4.23.0)"]
full = ["docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.114.0)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-vizier (>=0.1.6)", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.16.0)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<2.10.dev0 || >=2.33.dev0,<=2.33.0)", "ray[default] (>=2.5,<=2.33.0)", "requests (>=2.28.1)", "scikit-learn", "scikit-learn (<1.6.0)", "setuptools (<70.0.0)", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<2.18.0)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "tqdm (>=4.23.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)"]
evaluation = ["pandas (>=1.0.0)", "scikit-learn (<1.6.0) ; python_version <= \"3.10\"", "scikit-learn ; python_version > \"3.10\"", "tqdm (>=4.23.0)"]
full = ["docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.114.0)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-vizier (>=0.1.6)", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.16.0)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pyarrow (>=10.0.1) ; python_version == \"3.11\"", "pyarrow (>=14.0.0) ; python_version >= \"3.12\"", "pyarrow (>=3.0.0,<8.0dev) ; python_version < \"3.11\"", "pyarrow (>=6.0.1)", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<2.10.dev0 || >=2.33.dev0,<=2.33.0) ; python_version < \"3.11\"", "ray[default] (>=2.5,<=2.33.0) ; python_version == \"3.11\"", "requests (>=2.28.1)", "scikit-learn (<1.6.0) ; python_version <= \"3.10\"", "scikit-learn ; python_version > \"3.10\"", "setuptools (<70.0.0)", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<2.18.0)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev) ; python_version <= \"3.11\"", "tensorflow (>=2.4.0,<3.0.0dev)", "tqdm (>=4.23.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)"]
langchain = ["langchain (>=0.3,<0.4)", "langchain-core (>=0.3,<0.4)", "langchain-google-vertexai (>=2,<3)", "langgraph (>=0.2.45,<0.3)", "openinference-instrumentation-langchain (>=0.1.19,<0.2)"]
langchain-testing = ["absl-py", "cloudpickle (>=3.0,<4.0)", "google-cloud-trace (<2)", "langchain (>=0.3,<0.4)", "langchain-core (>=0.3,<0.4)", "langchain-google-vertexai (>=2,<3)", "langgraph (>=0.2.45,<0.3)", "openinference-instrumentation-langchain (>=0.1.19,<0.2)", "opentelemetry-exporter-gcp-trace (<2)", "opentelemetry-sdk (<2)", "pydantic (>=2.6.3,<3)", "pytest-xdist", "typing-extensions"]
lit = ["explainable-ai-sdk (>=1.0.0)", "lit-nlp (==0.4.0)", "pandas (>=1.0.0)", "tensorflow (>=2.3.0,<3.0.0dev)"]
@@ -2335,11 +2335,11 @@ metadata = ["numpy (>=1.15.0)", "pandas (>=1.0.0)"]
pipelines = ["pyyaml (>=5.3.1,<7)"]
prediction = ["docker (>=5.0.3)", "fastapi (>=0.71.0,<=0.114.0)", "httpx (>=0.23.0,<0.25.0)", "starlette (>=0.17.1)", "uvicorn[standard] (>=0.16.0)"]
private-endpoints = ["requests (>=2.28.1)", "urllib3 (>=1.21.1,<1.27)"]
ray = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0)", "pyarrow (>=6.0.1)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<2.10.dev0 || >=2.33.dev0,<=2.33.0)", "ray[default] (>=2.5,<=2.33.0)", "setuptools (<70.0.0)"]
ray-testing = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0)", "pyarrow (>=6.0.1)", "pytest-xdist", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<2.10.dev0 || >=2.33.dev0,<=2.33.0)", "ray[default] (>=2.5,<=2.33.0)", "ray[train]", "scikit-learn (<1.6.0)", "setuptools (<70.0.0)", "tensorflow", "torch (>=2.0.0,<2.1.0)", "xgboost", "xgboost-ray"]
ray = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0)", "pyarrow (>=6.0.1)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<2.10.dev0 || >=2.33.dev0,<=2.33.0) ; python_version < \"3.11\"", "ray[default] (>=2.5,<=2.33.0) ; python_version == \"3.11\"", "setuptools (<70.0.0)"]
ray-testing = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0)", "pyarrow (>=6.0.1)", "pytest-xdist", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<2.10.dev0 || >=2.33.dev0,<=2.33.0) ; python_version < \"3.11\"", "ray[default] (>=2.5,<=2.33.0) ; python_version == \"3.11\"", "ray[train]", "scikit-learn (<1.6.0)", "setuptools (<70.0.0)", "tensorflow", "torch (>=2.0.0,<2.1.0)", "xgboost", "xgboost-ray"]
reasoningengine = ["cloudpickle (>=3.0,<4.0)", "google-cloud-trace (<2)", "opentelemetry-exporter-gcp-trace (<2)", "opentelemetry-sdk (<2)", "pydantic (>=2.6.3,<3)", "typing-extensions"]
tensorboard = ["tensorboard-plugin-profile (>=2.4.0,<2.18.0)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "werkzeug (>=2.0.0,<2.1.0dev)"]
testing = ["aiohttp", "bigframes", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.114.0)", "google-api-core (>=2.11,<3.0.0)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-vizier (>=0.1.6)", "grpcio-testing", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "ipython", "kfp (>=2.6.0,<3.0.0)", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.16.0)", "nltk", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pytest-asyncio", "pytest-xdist", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<2.10.dev0 || >=2.33.dev0,<=2.33.0)", "ray[default] (>=2.5,<=2.33.0)", "requests (>=2.28.1)", "requests-toolbelt (<1.0.0)", "scikit-learn", "scikit-learn (<1.6.0)", "sentencepiece (>=0.2.0)", "setuptools (<70.0.0)", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<2.18.0)", "tensorflow (==2.13.0)", "tensorflow (==2.16.1)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "torch (>=2.0.0,<2.1.0)", "torch (>=2.2.0)", "tqdm (>=4.23.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)", "xgboost"]
tensorboard = ["tensorboard-plugin-profile (>=2.4.0,<2.18.0)", "tensorflow (>=2.3.0,<3.0.0dev) ; python_version <= \"3.11\"", "tensorflow (>=2.4.0,<3.0.0dev)", "werkzeug (>=2.0.0,<2.1.0dev)"]
testing = ["aiohttp", "bigframes ; python_version >= \"3.10\"", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.114.0)", "google-api-core (>=2.11,<3.0.0)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-vizier (>=0.1.6)", "grpcio-testing", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "ipython", "kfp (>=2.6.0,<3.0.0)", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.16.0)", "nltk", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pyarrow (>=10.0.1) ; python_version == \"3.11\"", "pyarrow (>=14.0.0) ; python_version >= \"3.12\"", "pyarrow (>=3.0.0,<8.0dev) ; python_version < \"3.11\"", "pyarrow (>=6.0.1)", "pytest-asyncio", "pytest-xdist", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<2.10.dev0 || >=2.33.dev0,<=2.33.0) ; python_version < \"3.11\"", "ray[default] (>=2.5,<=2.33.0) ; python_version == \"3.11\"", "requests (>=2.28.1)", "requests-toolbelt (<1.0.0)", "scikit-learn (<1.6.0) ; python_version <= \"3.10\"", "scikit-learn ; python_version > \"3.10\"", "sentencepiece (>=0.2.0)", "setuptools (<70.0.0)", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<2.18.0)", "tensorflow (==2.13.0) ; python_version <= \"3.11\"", "tensorflow (==2.16.1) ; python_version > \"3.11\"", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev) ; python_version <= \"3.11\"", "tensorflow (>=2.4.0,<3.0.0dev)", "torch (>=2.0.0,<2.1.0) ; python_version <= \"3.11\"", "torch (>=2.2.0) ; python_version > \"3.11\"", "tqdm (>=4.23.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)", "xgboost"]
tokenization = ["sentencepiece (>=0.2.0)"]
vizier = ["google-vizier (>=0.1.6)"]
xai = ["tensorflow (>=2.3.0,<3.0.0dev)"]
@@ -2368,12 +2368,12 @@ requests = ">=2.21.0,<3.0.0dev"
[package.extras]
all = ["google-cloud-bigquery[bigquery-v2,bqstorage,geopandas,ipython,ipywidgets,opentelemetry,pandas,tqdm]"]
bigquery-v2 = ["proto-plus (>=1.22.3,<2.0.0dev)", "protobuf (>=3.20.2,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<6.0.0dev)"]
bqstorage = ["google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "pyarrow (>=3.0.0)"]
bqstorage = ["google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev) ; python_version >= \"3.11\"", "pyarrow (>=3.0.0)"]
geopandas = ["Shapely (>=1.8.4,<3.0.0dev)", "geopandas (>=0.9.0,<2.0dev)"]
ipython = ["bigquery-magics (>=0.1.0)"]
ipywidgets = ["ipykernel (>=6.0.0)", "ipywidgets (>=7.7.0)"]
opentelemetry = ["opentelemetry-api (>=1.1.0)", "opentelemetry-instrumentation (>=0.20b0)", "opentelemetry-sdk (>=1.1.0)"]
pandas = ["db-dtypes (>=0.3.0,<2.0.0dev)", "importlib-metadata (>=1.0.0)", "pandas (>=1.1.0)", "pyarrow (>=3.0.0)"]
pandas = ["db-dtypes (>=0.3.0,<2.0.0dev)", "importlib-metadata (>=1.0.0) ; python_version < \"3.8\"", "pandas (>=1.1.0)", "pyarrow (>=3.0.0)"]
tqdm = ["tqdm (>=4.7.4,<5.0.0dev)"]
[[package]]
@@ -2893,7 +2893,7 @@ httpcore = "==1.*"
idna = "*"
[package.extras]
brotli = ["brotli", "brotlicffi"]
brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""]
cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"]
http2 = ["h2 (>=3,<5)"]
socks = ["socksio (==1.*)"]
@@ -3028,7 +3028,7 @@ zipp = ">=0.5"
[package.extras]
docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
perf = ["ipython"]
testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"]
testing = ["flufl.flake8", "importlib-resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy ; platform_python_implementation != \"PyPy\"", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"]
[[package]]
name = "iniconfig"
@@ -3102,7 +3102,7 @@ traitlets = ">=5.13.0"
[package.extras]
all = ["ipython[black,doc,kernel,matplotlib,nbconvert,nbformat,notebook,parallel,qtconsole]", "ipython[test,test-extra]"]
black = ["black"]
doc = ["docrepr", "exceptiongroup", "intersphinx_registry", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "tomli", "typing_extensions"]
doc = ["docrepr", "exceptiongroup", "intersphinx_registry", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "tomli ; python_version < \"3.11\"", "typing_extensions"]
kernel = ["ipykernel"]
matplotlib = ["matplotlib"]
nbconvert = ["nbconvert"]
@@ -3415,7 +3415,7 @@ traitlets = ">=5.3"
[package.extras]
docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx (>=4)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"]
test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest (<8.2.0)", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"]
test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko ; sys_platform == \"win32\"", "pre-commit", "pytest (<8.2.0)", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"]
[[package]]
name = "jupyter-core"
@@ -4415,7 +4415,7 @@ files = [
[package.extras]
develop = ["codecov", "pycodestyle", "pytest (>=4.6)", "pytest-cov", "wheel"]
docs = ["sphinx"]
gmpy = ["gmpy2 (>=2.1.0a4)"]
gmpy = ["gmpy2 (>=2.1.0a4) ; platform_python_implementation != \"PyPy\""]
tests = ["pytest (>=4.6)"]
[[package]]
@@ -4813,7 +4813,7 @@ tornado = ">=6.2.0"
[package.extras]
dev = ["hatch", "pre-commit"]
docs = ["myst-parser", "nbsphinx", "pydata-sphinx-theme", "sphinx (>=1.3.6)", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"]
test = ["importlib-resources (>=5.0)", "ipykernel", "jupyter-server[test] (>=2.4.0,<3)", "jupyterlab-server[test] (>=2.27.1,<3)", "nbval", "pytest (>=7.0)", "pytest-console-scripts", "pytest-timeout", "pytest-tornasync", "requests"]
test = ["importlib-resources (>=5.0) ; python_version < \"3.10\"", "ipykernel", "jupyter-server[test] (>=2.4.0,<3)", "jupyterlab-server[test] (>=2.27.1,<3)", "nbval", "pytest (>=7.0)", "pytest-console-scripts", "pytest-timeout", "pytest-tornasync", "requests"]
[[package]]
name = "notebook-shim"
@@ -5323,7 +5323,7 @@ docs = ["furo", "olefile", "sphinx (>=8.1)", "sphinx-copybutton", "sphinx-inline
fpx = ["olefile"]
mic = ["olefile"]
tests = ["check-manifest", "coverage (>=7.4.2)", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout", "trove-classifiers (>=2024.10.12)"]
typing = ["typing-extensions"]
typing = ["typing-extensions ; python_version < \"3.10\""]
xmp = ["defusedxml"]
[[package]]
@@ -5805,7 +5805,7 @@ typing-extensions = ">=4.12.2"
[package.extras]
email = ["email-validator (>=2.0.0)"]
timezone = ["tzdata"]
timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""]
[[package]]
name = "pydantic-core"
@@ -5938,7 +5938,7 @@ numpy = ">=1.16.4"
[package.extras]
carto = ["pydeck-carto"]
jupyter = ["ipykernel (>=5.1.2)", "ipython (>=5.8.0)", "ipywidgets (>=7,<8)", "traitlets (>=4.3.2)"]
jupyter = ["ipykernel (>=5.1.2) ; python_version >= \"3.4\"", "ipython (>=5.8.0) ; python_version < \"3.4\"", "ipywidgets (>=7,<8)", "traitlets (>=4.3.2)"]
[[package]]
name = "pyee"
@@ -5956,7 +5956,7 @@ files = [
typing-extensions = "*"
[package.extras]
dev = ["black", "flake8", "flake8-black", "isort", "jupyter-console", "mkdocs", "mkdocs-include-markdown-plugin", "mkdocstrings[python]", "pytest", "pytest-asyncio", "pytest-trio", "toml", "tox", "trio", "trio", "trio-typing", "twine", "twisted", "validate-pyproject[all]"]
dev = ["black", "flake8", "flake8-black", "isort", "jupyter-console", "mkdocs", "mkdocs-include-markdown-plugin", "mkdocstrings[python]", "pytest", "pytest-asyncio ; python_version >= \"3.4\"", "pytest-trio ; python_version >= \"3.7\"", "toml", "tox", "trio", "trio ; python_version > \"3.6\"", "trio-typing ; python_version > \"3.6\"", "twine", "twisted", "validate-pyproject[all]"]
[[package]]
name = "pyflakes"
@@ -6338,7 +6338,7 @@ files = [
]
[package.extras]
dev = ["backports.zoneinfo", "black", "build", "freezegun", "mdx_truly_sane_lists", "mike", "mkdocs", "mkdocs-awesome-pages-plugin", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-material (>=8.5)", "mkdocstrings[python]", "msgspec", "mypy", "orjson", "pylint", "pytest", "tzdata", "validate-pyproject[all]"]
dev = ["backports.zoneinfo ; python_version < \"3.9\"", "black", "build", "freezegun", "mdx_truly_sane_lists", "mike", "mkdocs", "mkdocs-awesome-pages-plugin", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-material (>=8.5)", "mkdocstrings[python]", "msgspec ; implementation_name != \"pypy\"", "mypy", "orjson ; implementation_name != \"pypy\"", "pylint", "pytest", "tzdata", "validate-pyproject[all]"]
[[package]]
name = "python-levenshtein"
@@ -7370,7 +7370,7 @@ tifffile = ">=2022.8.12"
[package.extras]
build = ["Cython (>=3.0.8)", "build (>=1.2.1)", "meson-python (>=0.16)", "ninja (>=1.11.1.1)", "numpy (>=2.0)", "pythran (>=0.16)", "spin (==0.13)"]
data = ["pooch (>=1.6.0)"]
developer = ["ipython", "pre-commit", "tomli"]
developer = ["ipython", "pre-commit", "tomli ; python_version < \"3.11\""]
docs = ["PyWavelets (>=1.6)", "dask[array] (>=2023.2.0)", "intersphinx-registry (>=0.2411.14)", "ipykernel", "ipywidgets", "kaleido (==0.2.1)", "matplotlib (>=3.7)", "myst-parser", "numpydoc (>=1.7)", "pandas (>=2.0)", "plotly (>=5.20)", "pooch (>=1.6)", "pydata-sphinx-theme (>=0.16)", "pytest-doctestplus", "scikit-learn (>=1.2)", "seaborn (>=0.11)", "sphinx (>=8.0)", "sphinx-copybutton", "sphinx-gallery[parallel] (>=0.18)", "sphinx_design (>=0.5)", "tifffile (>=2022.8.12)"]
optional = ["PyWavelets (>=1.6)", "SimpleITK", "astropy (>=5.0)", "cloudpickle (>=1.1.1)", "dask[array] (>=2023.2.0)", "matplotlib (>=3.7)", "pooch (>=1.6.0)", "pyamg (>=5.2)", "scikit-learn (>=1.2)"]
test = ["asv", "numpydoc (>=1.7)", "pooch (>=1.6.0)", "pytest (>=8)", "pytest-cov (>=2.11.0)", "pytest-doctestplus", "pytest-faulthandler", "pytest-localserver"]
@@ -7437,7 +7437,7 @@ numpy = ">=1.23.5,<2.5"
[package.extras]
dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy (==1.10.0)", "pycodestyle", "pydevtool", "rich-click", "ruff (>=0.0.292)", "types-psutil", "typing_extensions"]
doc = ["intersphinx_registry", "jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.16.5)", "jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0,<8.0.0)", "sphinx-copybutton", "sphinx-design (>=0.4.0)"]
test = ["Cython", "array-api-strict (>=2.0,<2.1.1)", "asv", "gmpy2", "hypothesis (>=6.30)", "meson", "mpmath", "ninja", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"]
test = ["Cython", "array-api-strict (>=2.0,<2.1.1)", "asv", "gmpy2", "hypothesis (>=6.30)", "meson", "mpmath", "ninja ; sys_platform != \"emscripten\"", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"]
[[package]]
name = "seaborn"
@@ -7474,9 +7474,9 @@ files = [
]
[package.extras]
nativelib = ["pyobjc-framework-Cocoa", "pywin32"]
objc = ["pyobjc-framework-Cocoa"]
win32 = ["pywin32"]
nativelib = ["pyobjc-framework-Cocoa ; sys_platform == \"darwin\"", "pywin32 ; sys_platform == \"win32\""]
objc = ["pyobjc-framework-Cocoa ; sys_platform == \"darwin\""]
win32 = ["pywin32 ; sys_platform == \"win32\""]
[[package]]
name = "setuptools"
@@ -7491,13 +7491,13 @@ files = [
]
[package.extras]
check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.8.0)"]
core = ["importlib_metadata (>=6)", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"]
check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""]
core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"]
cover = ["pytest-cov"]
doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"]
enabler = ["pytest-enabler (>=2.2)"]
test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"]
type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.14.*)", "pytest-mypy"]
test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"]
type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"]
[[package]]
name = "shapely"
@@ -7747,7 +7747,7 @@ typing-extensions = ">=4.4.0,<5"
watchdog = {version = ">=2.1.5,<7", markers = "platform_system != \"Darwin\""}
[package.extras]
snowflake = ["snowflake-connector-python (>=3.3.0)", "snowflake-snowpark-python[modin] (>=1.17.0)"]
snowflake = ["snowflake-connector-python (>=3.3.0) ; python_version < \"3.12\"", "snowflake-snowpark-python[modin] (>=1.17.0) ; python_version < \"3.12\""]
[[package]]
name = "strenum"
@@ -8605,7 +8605,7 @@ files = [
]
[package.extras]
brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""]
h2 = ["h2 (>=4,<5)"]
socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
zstd = ["zstandard (>=0.18.0)"]
@@ -8627,7 +8627,7 @@ click = ">=7.0"
h11 = ">=0.8"
[package.extras]
standard = ["colorama (>=0.4)", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"]
standard = ["colorama (>=0.4) ; sys_platform == \"win32\"", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1) ; sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\"", "watchfiles (>=0.13)", "websockets (>=10.4)"]
[[package]]
name = "virtualenv"
@@ -8648,7 +8648,7 @@ platformdirs = ">=3.9.1,<5"
[package.extras]
docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"]
test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"]
test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8) ; platform_python_implementation == \"PyPy\" or platform_python_implementation == \"CPython\" and sys_platform == \"win32\" and python_version >= \"3.13\"", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10) ; platform_python_implementation == \"CPython\""]
[[package]]
name = "watchdog"
@@ -9232,11 +9232,11 @@ files = [
]
[package.extras]
check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"]
check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""]
cover = ["pytest-cov"]
doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
enabler = ["pytest-enabler (>=2.2)"]
test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"]
test = ["big-O", "importlib-resources ; python_version < \"3.9\"", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"]
type = ["pytest-mypy"]
[[package]]

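The poetry.lock churn above is mechanical: the regenerated lock file now writes PEP 508 environment markers (for example ; python_version < "3.9" or ; sys_platform == "win32") directly into each extra's requirement string, so every touched extra is rewritten even though the resolved versions are unchanged. As an illustration only (not part of this diff), such a marker can be evaluated against the current interpreter with the packaging library:

# Illustration only: evaluating one of the environment markers that now
# appears in the lock file's extras entries.
from packaging.markers import Marker

marker = Marker('python_version < "3.9"')
print(marker.evaluate())  # False on Python 3.9+, True on older interpreters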
View File

@@ -99,6 +99,8 @@ reportlab = "*"
[tool.coverage.run]
concurrency = ["gevent"]
[tool.poetry.group.runtime.dependencies]
jupyterlab = "*"
notebook = "*"
@@ -127,6 +129,8 @@ ignore = ["D1"]
[tool.ruff.lint.pydocstyle]
convention = "google"
[tool.poetry.group.evaluation.dependencies]
streamlit = "*"
whatthepatch = "*"

View File

@@ -3,6 +3,7 @@ from types import MappingProxyType
import pytest
from pydantic import SecretStr, ValidationError
from openhands.events.action.commands import CmdRunAction
from openhands.integrations.provider import (
ProviderHandler,
ProviderToken,
@@ -227,3 +228,146 @@ def test_token_conversion():
)
assert len(store6.provider_tokens.keys()) == 0
def test_provider_handler_type_enforcement():
with pytest.raises(TypeError):
ProviderHandler(provider_tokens={'a': 'b'})
def test_expose_env_vars():
"""Test that expose_env_vars correctly exposes secrets as strings"""
tokens = MappingProxyType(
{
ProviderType.GITHUB: ProviderToken(token=SecretStr('test_token')),
ProviderType.GITLAB: ProviderToken(token=SecretStr('gitlab_token')),
}
)
handler = ProviderHandler(provider_tokens=tokens)
# Test with specific provider tokens
env_secrets = {
ProviderType.GITHUB: SecretStr('gh_token'),
ProviderType.GITLAB: SecretStr('gl_token'),
}
exposed = handler.expose_env_vars(env_secrets)
assert exposed['github_token'] == 'gh_token'
assert exposed['gitlab_token'] == 'gl_token'
@pytest.mark.asyncio
async def test_get_env_vars():
"""Test get_env_vars with different configurations"""
tokens = MappingProxyType(
{
ProviderType.GITHUB: ProviderToken(token=SecretStr('test_token')),
ProviderType.GITLAB: ProviderToken(token=SecretStr('gitlab_token')),
}
)
handler = ProviderHandler(provider_tokens=tokens)
# Test getting all tokens unexposed
env_vars = await handler.get_env_vars(expose_secrets=False)
assert isinstance(env_vars, dict)
assert isinstance(env_vars[ProviderType.GITHUB], SecretStr)
assert env_vars[ProviderType.GITHUB].get_secret_value() == 'test_token'
assert env_vars[ProviderType.GITLAB].get_secret_value() == 'gitlab_token'
# Test getting specific providers
env_vars = await handler.get_env_vars(
expose_secrets=False, providers=[ProviderType.GITHUB]
)
assert len(env_vars) == 1
assert ProviderType.GITHUB in env_vars
assert ProviderType.GITLAB not in env_vars
# Test exposed secrets
exposed_vars = await handler.get_env_vars(expose_secrets=True)
assert isinstance(exposed_vars, dict)
assert exposed_vars['github_token'] == 'test_token'
assert exposed_vars['gitlab_token'] == 'gitlab_token'
# Test empty tokens
empty_handler = ProviderHandler(provider_tokens=MappingProxyType({}))
empty_vars = await empty_handler.get_env_vars()
assert empty_vars == {}
@pytest.fixture
def event_stream():
"""Fixture for event stream testing"""
class TestEventStream:
def __init__(self):
self.secrets = {}
def set_secrets(self, secrets):
self.secrets = secrets
return TestEventStream()
@pytest.mark.asyncio
async def test_set_event_stream_secrets(event_stream):
"""Test setting secrets in event stream"""
tokens = MappingProxyType(
{
ProviderType.GITHUB: ProviderToken(token=SecretStr('test_token')),
ProviderType.GITLAB: ProviderToken(token=SecretStr('gitlab_token')),
}
)
handler = ProviderHandler(provider_tokens=tokens)
# Test with provided env_vars
env_vars = {
ProviderType.GITHUB: SecretStr('new_token'),
ProviderType.GITLAB: SecretStr('new_gitlab_token'),
}
await handler.set_event_stream_secrets(event_stream, env_vars)
assert event_stream.secrets == {
'github_token': 'new_token',
'gitlab_token': 'new_gitlab_token',
}
# Test without env_vars (using existing tokens)
await handler.set_event_stream_secrets(event_stream)
assert event_stream.secrets == {
'github_token': 'test_token',
'gitlab_token': 'gitlab_token',
}
def test_check_cmd_action_for_provider_token_ref():
"""Test detection of provider tokens in command actions"""
# Test command with GitHub token
cmd = CmdRunAction(command='echo $GITHUB_TOKEN')
providers = ProviderHandler.check_cmd_action_for_provider_token_ref(cmd)
assert ProviderType.GITHUB in providers
assert len(providers) == 1
# Test command with multiple tokens
cmd = CmdRunAction(command='echo $GITHUB_TOKEN && echo $GITLAB_TOKEN')
providers = ProviderHandler.check_cmd_action_for_provider_token_ref(cmd)
assert ProviderType.GITHUB in providers
assert ProviderType.GITLAB in providers
assert len(providers) == 2
# Test command without tokens
cmd = CmdRunAction(command='echo "Hello"')
providers = ProviderHandler.check_cmd_action_for_provider_token_ref(cmd)
assert len(providers) == 0
# Test non-command action
from openhands.events.action import MessageAction
msg = MessageAction(content='test')
providers = ProviderHandler.check_cmd_action_for_provider_token_ref(msg)
assert len(providers) == 0
def test_get_provider_env_key():
"""Test provider environment key generation"""
assert ProviderHandler.get_provider_env_key(ProviderType.GITHUB) == 'github_token'
assert ProviderHandler.get_provider_env_key(ProviderType.GITLAB) == 'gitlab_token'
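Taken together, these assertions pin down a small contract for ProviderHandler: tokens are keyed by ProviderType, the environment key is the lower-cased provider name plus '_token', and exposing secrets unwraps the SecretStr values into plain strings for the runtime. The sketch below only restates that contract under those assumptions; the real helpers live on ProviderHandler (get_provider_env_key, expose_env_vars) and may differ in detail.

# Illustrative sketch of the contract the tests above assert; not the
# actual ProviderHandler implementation.
from pydantic import SecretStr

from openhands.integrations.provider import ProviderType


def get_provider_env_key(provider: ProviderType) -> str:
    # Assumes the enum value is the lower-cased provider name, e.g. 'github'.
    return f'{provider.value}_token'


def expose_env_vars(env_secrets: dict[ProviderType, SecretStr]) -> dict[str, str]:
    # Unwrap SecretStr values so they can be exported as runtime env vars.
    return {
        get_provider_env_key(provider): secret.get_secret_value()
        for provider, secret in env_secrets.items()
    }


# Mirrors the expectation in test_expose_env_vars:
#   expose_env_vars({ProviderType.GITHUB: SecretStr('gh_token')})
#   -> {'github_token': 'gh_token'}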

View File

@@ -0,0 +1,182 @@
from types import MappingProxyType
import pytest
from pydantic import SecretStr
from openhands.core.config import AppConfig
from openhands.events.action import Action
from openhands.events.action.commands import CmdRunAction
from openhands.events.observation import NullObservation, Observation
from openhands.events.stream import EventStream
from openhands.integrations.provider import ProviderToken, ProviderType
from openhands.runtime.base import Runtime
from openhands.storage import get_file_store
class TestRuntime(Runtime):
"""A concrete implementation of Runtime for testing"""
async def connect(self):
pass
def close(self):
pass
def browse(self, action):
return NullObservation()
def browse_interactive(self, action):
return NullObservation()
def run(self, action):
return NullObservation()
def run_ipython(self, action):
return NullObservation()
def read(self, action):
return NullObservation()
def write(self, action):
return NullObservation()
def copy_from(self, path):
return ''
def copy_to(self, path, content):
pass
def list_files(self, path):
return []
def run_action(self, action: Action) -> Observation:
return NullObservation()
@pytest.fixture
def temp_dir(tmp_path_factory: pytest.TempPathFactory) -> str:
return str(tmp_path_factory.mktemp('test_event_stream'))
@pytest.fixture
def runtime(temp_dir):
"""Fixture for runtime testing"""
config = AppConfig()
git_provider_tokens = MappingProxyType(
{ProviderType.GITHUB: ProviderToken(token=SecretStr('test_token'))}
)
file_store = get_file_store('local', temp_dir)
event_stream = EventStream('abc', file_store)
runtime = TestRuntime(
config=config,
event_stream=event_stream,
sid='test',
user_id='test_user',
git_provider_tokens=git_provider_tokens,
)
return runtime
@pytest.mark.asyncio
async def test_export_latest_git_provider_tokens_no_user_id(temp_dir):
"""Test that no token export happens when user_id is not set"""
config = AppConfig()
file_store = get_file_store('local', temp_dir)
event_stream = EventStream('abc', file_store)
runtime = TestRuntime(config=config, event_stream=event_stream, sid='test')
# Create a command that would normally trigger token export
cmd = CmdRunAction(command='echo $GITHUB_TOKEN')
# This should not raise any errors and should return None
await runtime._export_latest_git_provider_tokens(cmd)
# Verify no secrets were set
assert not event_stream.secrets
@pytest.mark.asyncio
async def test_export_latest_git_provider_tokens_no_token_ref(temp_dir):
"""Test that no token export happens when command doesn't reference tokens"""
config = AppConfig()
file_store = get_file_store('local', temp_dir)
event_stream = EventStream('abc', file_store)
runtime = TestRuntime(
config=config, event_stream=event_stream, sid='test', user_id='test_user'
)
# Create a command that doesn't reference any tokens
cmd = CmdRunAction(command='echo "hello"')
# This should not raise any errors and should return None
await runtime._export_latest_git_provider_tokens(cmd)
# Verify no secrets were set
assert not event_stream.secrets
@pytest.mark.asyncio
async def test_export_latest_git_provider_tokens_success(runtime):
"""Test successful token export when command references tokens"""
# Create a command that references the GitHub token
cmd = CmdRunAction(command='echo $GITHUB_TOKEN')
# Export the tokens
await runtime._export_latest_git_provider_tokens(cmd)
# Verify that the token was exported to the event stream
assert runtime.event_stream.secrets == {'github_token': 'test_token'}
@pytest.mark.asyncio
async def test_export_latest_git_provider_tokens_multiple_refs(temp_dir):
"""Test token export with multiple token references"""
config = AppConfig()
# Initialize with both GitHub and GitLab tokens
git_provider_tokens = MappingProxyType(
{
ProviderType.GITHUB: ProviderToken(token=SecretStr('github_token')),
ProviderType.GITLAB: ProviderToken(token=SecretStr('gitlab_token')),
}
)
file_store = get_file_store('local', temp_dir)
event_stream = EventStream('abc', file_store)
runtime = TestRuntime(
config=config,
event_stream=event_stream,
sid='test',
user_id='test_user',
git_provider_tokens=git_provider_tokens,
)
# Create a command that references multiple tokens
cmd = CmdRunAction(command='echo $GITHUB_TOKEN && echo $GITLAB_TOKEN')
# Export the tokens
await runtime._export_latest_git_provider_tokens(cmd)
# Verify that both tokens were exported
assert event_stream.secrets == {
'github_token': 'github_token',
'gitlab_token': 'gitlab_token',
}
@pytest.mark.asyncio
async def test_export_latest_git_provider_tokens_token_update(runtime):
"""Test that token updates are handled correctly"""
# First export with initial token
cmd = CmdRunAction(command='echo $GITHUB_TOKEN')
await runtime._export_latest_git_provider_tokens(cmd)
# Update the token
new_token = 'new_test_token'
runtime.provider_handler._provider_tokens = MappingProxyType(
{ProviderType.GITHUB: ProviderToken(token=SecretStr(new_token))}
)
# Export again with updated token
await runtime._export_latest_git_provider_tokens(cmd)
# Verify that the new token was exported
assert runtime.event_stream.secrets == {'github_token': new_token}
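Taken together, these tests describe the token-export path end to end: do nothing when the runtime has no user_id, do nothing when the command does not reference a provider token, and otherwise fetch the latest tokens and push them into the event stream's secrets. A hypothetical, free-standing reconstruction of that flow (an assumption based on the tests, not the actual Runtime._export_latest_git_provider_tokens implementation) could look like this:

# Hypothetical reconstruction of the flow exercised by the tests above.
from openhands.events.action import Action
from openhands.events.stream import EventStream
from openhands.integrations.provider import ProviderHandler


async def export_latest_git_provider_tokens(
    handler: ProviderHandler,
    event_stream: EventStream,
    user_id: str | None,
    action: Action,
) -> None:
    if not user_id:
        # No user context: nothing to refresh or export.
        return
    providers = ProviderHandler.check_cmd_action_for_provider_token_ref(action)
    if not providers:
        # The command does not reference $GITHUB_TOKEN / $GITLAB_TOKEN.
        return
    env_vars = await handler.get_env_vars(expose_secrets=False, providers=providers)
    await handler.set_event_stream_secrets(event_stream, env_vars)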