From e45d46c993e25c93d33436356afadd15cefa53bf Mon Sep 17 00:00:00 2001 From: Xingyao Wang Date: Fri, 12 Jul 2024 01:52:26 +0800 Subject: [PATCH] [Arch] Implement EventStream Runtime Client with Jupyter Support using Agnostic Sandbox (#2879) * support loading a particular runtime class via config.runtime (default to server to not break things) * move image agnostic util to shared runtime util * move dependency * include poetry.lock in sdist * accept port as arg for client * make client start server with specified port * update image agnostic utility for eventstream runtime * make client and runtime working with REST API * rename execute_server * add plugin to initialize stuff inside es-runtime; cleanup runtime methods to delegate everything to container * remove redundant ls -alh * fix jupyter * improve logging in agnostic sandbox * improve logging of test function * add read & edit * update agnostic sandbox * support setting work dir at start * fix file read/write test * fix unit test * update tescase * Fix unit test again * fix unit test again again --- opendevin/core/main.py | 5 +- opendevin/runtime/__init__.py | 32 +- opendevin/runtime/client/client.py | 389 +++--- opendevin/runtime/client/runtime.py | 409 +++--- opendevin/runtime/plugins/__init__.py | 14 +- .../runtime/plugins/agent_skills/__init__.py | 6 +- opendevin/runtime/plugins/jupyter/__init__.py | 63 +- .../{execute_server => execute_server.py} | 4 + opendevin/runtime/plugins/jupyter/setup.sh | 2 +- opendevin/runtime/plugins/requirement.py | 23 + opendevin/runtime/utils/image_agnostic.py | 157 ++- opendevin/runtime/utils/source.py | 32 + opendevin/server/session/agent.py | 19 +- poetry.lock | 1102 ++++++++++++++++- pyproject.toml | 20 +- tests/unit/test_image_agnostic_util.py | 23 +- 16 files changed, 1896 insertions(+), 404 deletions(-) rename opendevin/runtime/plugins/jupyter/{execute_server => execute_server.py} (98%) create mode 100644 opendevin/runtime/utils/source.py diff --git 
a/opendevin/core/main.py b/opendevin/core/main.py index b54b31de3b..0a1072984b 100644 --- a/opendevin/core/main.py +++ b/opendevin/core/main.py @@ -15,8 +15,8 @@ from opendevin.events.action import MessageAction from opendevin.events.event import Event from opendevin.events.observation import AgentStateChangedObservation from opendevin.llm.llm import LLM +from opendevin.runtime import get_runtime_cls from opendevin.runtime.sandbox import Sandbox -from opendevin.runtime.server.runtime import ServerRuntime def read_task_from_file(file_path: str) -> str: @@ -79,7 +79,8 @@ async def run_agent_controller( ) # runtime and tools - runtime = ServerRuntime(event_stream=event_stream, sandbox=sandbox) + runtime_cls = get_runtime_cls(config.runtime) + runtime = runtime_cls(event_stream=event_stream, sandbox=sandbox) runtime.init_sandbox_plugins(controller.agent.sandbox_plugins) runtime.init_runtime_tools( controller.agent.runtime_tools, diff --git a/opendevin/runtime/__init__.py b/opendevin/runtime/__init__.py index 0fc1c6ad23..72bb6ecf86 100644 --- a/opendevin/runtime/__init__.py +++ b/opendevin/runtime/__init__.py @@ -1,6 +1,36 @@ +from typing import TYPE_CHECKING, Type + from .docker.local_box import LocalBox from .docker.ssh_box import DockerSSHBox from .e2b.sandbox import E2BBox from .sandbox import Sandbox -__all__ = ['Sandbox', 'DockerSSHBox', 'E2BBox', 'LocalBox'] +if TYPE_CHECKING: + from .runtime import Runtime + + +def get_runtime_cls(name: str) -> Type['Runtime']: + # Local imports to avoid circular imports + if name == 'server': + from .server.runtime import ServerRuntime + + return ServerRuntime + elif name == 'client': + from .client.runtime import EventStreamRuntime + + return EventStreamRuntime + elif name == 'e2b': + from .e2b.runtime import E2BRuntime + + return E2BRuntime + else: + raise ValueError(f'Runtime {name} not supported') + + +__all__ = [ + 'DockerSSHBox', + 'E2BBox', + 'LocalBox', + 'Sandbox', + 'get_runtime_cls', +] diff --git 
a/opendevin/runtime/client/client.py b/opendevin/runtime/client/client.py index 2a9f80b5a3..e06ffe084a 100644 --- a/opendevin/runtime/client/client.py +++ b/opendevin/runtime/client/client.py @@ -1,229 +1,238 @@ +import argparse import asyncio import os -import websockets +from pathlib import Path + import pexpect -import json -import shutil -import re -from typing import Any -from websockets.exceptions import ConnectionClosed -from opendevin.events.serialization import event_to_dict, event_from_dict -from opendevin.events.observation import Observation -from opendevin.runtime.plugins import PluginRequirement +from fastapi import FastAPI, HTTPException, Request +from pydantic import BaseModel +from uvicorn import run + +from opendevin.core.logger import opendevin_logger as logger from opendevin.events.action import ( Action, CmdRunAction, + FileReadAction, + FileWriteAction, IPythonRunCellAction, ) from opendevin.events.observation import ( CmdOutputObservation, ErrorObservation, + FileReadObservation, + FileWriteObservation, Observation, - IPythonRunCellObservation ) +from opendevin.events.serialization import event_from_dict, event_to_dict from opendevin.runtime.plugins import ( - AgentSkillsRequirement, - JupyterRequirement, - PluginRequirement, + ALL_PLUGINS, + JupyterPlugin, + Plugin, ) +from opendevin.runtime.server.files import insert_lines, read_lines -class RuntimeClient(): - # This runtime will listen to the websocket - # When receive an event, it will run the action and send the observation back to the websocket +app = FastAPI() - def __init__(self) -> None: - self.init_shell() - # TODO: code will block at init_websocket, maybe we can open a subprocess to run websocket forever - # In case we need to run other code after init_websocket - self.init_websocket() - def init_websocket(self) -> None: - server = websockets.serve(self.listen, "0.0.0.0", 8080) - loop = asyncio.get_event_loop() - loop.run_until_complete(server) - loop.run_forever() - - def 
init_shell(self) -> None: - # run as root - self.shell = pexpect.spawn('/bin/bash', encoding='utf-8') - self.shell.expect(r'[$#] ') +class ActionRequest(BaseModel): + action: dict - async def listen(self, websocket): - try: - async for message in websocket: - event_str = json.loads(message) - event = event_from_dict(event_str) - if isinstance(event, Action): - observation = self.run_action(event) - await websocket.send(json.dumps(event_to_dict(observation))) - except ConnectionClosed: - print("Connection closed") - - def run_action(self, action) -> Observation: - # Should only receive Action CmdRunAction and IPythonRunCellAction - action_type = action.action # type: ignore[attr-defined] - observation = getattr(self, action_type)(action) - # TODO: see comments in https://github.com/OpenDevin/OpenDevin/pull/2603#discussion_r1668994137 - observation._parent = action.id # type: ignore[attr-defined] - return observation - - def run(self, action: CmdRunAction) -> Observation: - return self._run_command(action.command) - - def _run_command(self, command: str) -> Observation: - try: - output, exit_code = self.execute(command) - return CmdOutputObservation( - command_id=-1, content=str(output), command=command, exit_code=exit_code - ) - except UnicodeDecodeError: - return ErrorObservation('Command output could not be decoded as utf-8') - def clean_up(self,input_text): - # Remove escape sequences - cleaned_text = re.sub(r'\x1b\[[0-9;?]*[a-zA-Z]', '', input_text) - # Remove carriage returns and other control characters - cleaned_text = re.sub(r'[\r\n\t]', '', cleaned_text) - return cleaned_text +class RuntimeClient: + """RuntimeClient is running inside docker sandbox. + It is responsible for executing actions received from OpenDevin backend and producing observations. 
+ """ - def execute(self, command): - print(f"Received command: {command}") + def __init__(self, plugins_to_load: list[Plugin], work_dir: str) -> None: + self._init_bash_shell(work_dir) + self.lock = asyncio.Lock() + self.plugins: dict[str, Plugin] = {} + + for plugin in plugins_to_load: + plugin.initialize() + self.plugins[plugin.name] = plugin + logger.info(f'Initializing plugin: {plugin.name}') + + def _init_bash_shell(self, work_dir: str) -> None: + self.shell = pexpect.spawn('/bin/bash', encoding='utf-8', echo=False) + self.__bash_expect = r'\[PEXPECT\][\$\#] ' + self.__bash_PS1 = r'\u@\h:\w [PEXPECT]\$ ' + self.shell.sendline(f'export PS1="{self.__bash_PS1}"') + self.shell.expect(self.__bash_expect) + self.shell.sendline(f'cd {work_dir}') + self.shell.expect(self.__bash_expect) + + def _execute_bash(self, command, keep_prompt: bool = True): + logger.info(f'Received command: {command}') self.shell.sendline(command) - self.shell.expect(r'[$#] ') - output = self.shell.before.strip().split('\r\n', 1)[1].strip() - # Get the exit code + self.shell.expect(self.__bash_expect) + output = self.shell.before + '$ ' + if not keep_prompt: + # remove the last line of the output (the prompt) + # e.g., user@host:~$ + output = '\r\n'.join(output.split('\r\n')[:-1]) + self.shell.sendline('echo $?') self.shell.expect(r'[$#] ') - exit_code = self.clean_up(self.shell.before.strip().split('\r\n')[1].strip()) + exit_code = int(self.shell.before.split('\r\n')[0].strip()) return output, exit_code - def run_ipython(self, action: IPythonRunCellAction) -> Observation: - obs = self._run_command( - ("cat > /tmp/opendevin_jupyter_temp.py <<'EOL'\n" f'{action.code}\n' 'EOL'), - ) - # run the code - obs = self._run_command('cat /tmp/opendevin_jupyter_temp.py | execute_cli') - output = obs.content - if 'pip install' in action.code: - print(output) - package_names = action.code.split(' ', 2)[-1] - is_single_package = ' ' not in package_names + async def run_action(self, action) -> Observation: 
+ action_type = action.action + observation = await getattr(self, action_type)(action) + observation._parent = action.id + return observation - if 'Successfully installed' in output: - restart_kernel = 'import IPython\nIPython.Application.instance().kernel.do_shutdown(True)' - if ( - 'Note: you may need to restart the kernel to use updated packages.' - in output - ): - self._run_command( - ( - "cat > /tmp/opendevin_jupyter_temp.py <<'EOL'\n" - f'{restart_kernel}\n' - 'EOL' - ) - ) - obs = self._run_command( - 'cat /tmp/opendevin_jupyter_temp.py | execute_cli' - ) - output = '[Package installed successfully]' - if "{'status': 'ok', 'restart': True}" != obs.content.strip(): - print(obs.content) - output += ( - '\n[But failed to restart the kernel to load the package]' + async def run(self, action: CmdRunAction) -> CmdOutputObservation: + try: + output, exit_code = self._execute_bash(action.command) + return CmdOutputObservation( + command_id=-1, + content=str(output), + command=action.command, + exit_code=exit_code, + ) + except UnicodeDecodeError: + raise RuntimeError('Command output could not be decoded as utf-8') + + async def run_ipython(self, action: IPythonRunCellAction) -> Observation: + if 'jupyter' in self.plugins: + _jupyter_plugin: JupyterPlugin = self.plugins['jupyter'] # type: ignore + return await _jupyter_plugin.run(action) + else: + raise RuntimeError( + 'JupyterRequirement not found. Unable to run IPython action.' 
+ ) + + def get_working_directory(self): + result, exit_code = self._execute_bash('pwd', keep_prompt=False) + if exit_code != 0: + raise RuntimeError('Failed to get working directory') + return result.strip() + + def _resolve_path(self, path: str, working_dir: str) -> str: + filepath = Path(path) + if not filepath.is_absolute(): + return str(Path(working_dir) / filepath) + return str(filepath) + + async def read(self, action: FileReadAction) -> Observation: + # NOTE: the client code is running inside the sandbox, + # so there's no need to check permission + working_dir = self.get_working_directory() + filepath = self._resolve_path(action.path, working_dir) + try: + with open(filepath, 'r', encoding='utf-8') as file: + lines = read_lines(file.readlines(), action.start, action.end) + except FileNotFoundError: + return ErrorObservation( + f'File not found: {filepath}. Your current working directory is {working_dir}.' + ) + except UnicodeDecodeError: + return ErrorObservation(f'File could not be decoded as utf-8: {filepath}.') + except IsADirectoryError: + return ErrorObservation( + f'Path is a directory: {filepath}. 
You can only read files' + ) + + code_view = ''.join(lines) + return FileReadObservation(path=filepath, content=code_view) + + async def write(self, action: FileWriteAction) -> Observation: + working_dir = self.get_working_directory() + filepath = self._resolve_path(action.path, working_dir) + + insert = action.content.split('\n') + try: + if not os.path.exists(os.path.dirname(filepath)): + os.makedirs(os.path.dirname(filepath)) + mode = 'w' if not os.path.exists(filepath) else 'r+' + try: + with open(filepath, mode, encoding='utf-8') as file: + if mode != 'w': + all_lines = file.readlines() + new_file = insert_lines( + insert, all_lines, action.start, action.end ) else: - output += ( - '\n[Kernel restarted successfully to load the package]' - ) + new_file = [i + '\n' for i in insert] - # re-init the kernel after restart - if action.kernel_init_code: - obs = self._run_command( - ( - f"cat > /tmp/opendevin_jupyter_init.py <<'EOL'\n" - f'{action.kernel_init_code}\n' - 'EOL' - ), - ) - obs = self._run_command( - 'cat /tmp/opendevin_jupyter_init.py | execute_cli', - ) - elif ( - is_single_package - and f'Requirement already satisfied: {package_names}' in output - ): - output = '[Package already installed]' - return IPythonRunCellObservation(content=output, code=action.code) + file.seek(0) + file.writelines(new_file) + file.truncate() + except FileNotFoundError: + return ErrorObservation(f'File not found: {filepath}') + except IsADirectoryError: + return ErrorObservation( + f'Path is a directory: {filepath}. 
You can only write to files' + ) + except UnicodeDecodeError: + return ErrorObservation( + f'File could not be decoded as utf-8: {filepath}' + ) + except PermissionError: + return ErrorObservation(f'Malformed paths not permitted: {filepath}') + return FileWriteObservation(content='', path=filepath) def close(self): self.shell.close() - - ############################################################################ - # Initialization work inside sandbox image - ############################################################################ - - # init_runtime_tools do in EventStreamRuntime - - def init_sandbox_plugins(self, requirements: list[PluginRequirement]) -> None: - # TODO:: test after settle donw the way to move code into sandbox - for requirement in requirements: - self._source_bashrc() - - shutil.copytree(requirement.host_src, requirement.sandbox_dest) - - # Execute the bash script - abs_path_to_bash_script = os.path.join( - requirement.sandbox_dest, requirement.bash_script_path - ) - - print( - f'Initializing plugin [{requirement.name}] by executing [{abs_path_to_bash_script}] in the sandbox.' 
- ) - output, exit_code = self.execute(abs_path_to_bash_script) - if exit_code != 0: - raise RuntimeError( - f'Failed to initialize plugin {requirement.name} with exit code {exit_code} and output: {output}' - ) - print(f'Plugin {requirement.name} initialized successfully.') - if len(requirements) > 0: - self._source_bashrc() - - def _source_bashrc(self): - output, exit_code = self.execute( - 'source /opendevin/bash.bashrc && source ~/.bashrc' - ) - if exit_code != 0: - raise RuntimeError( - f'Failed to source /opendevin/bash.bashrc and ~/.bashrc with exit code {exit_code} and output: {output}' - ) - print('Sourced /opendevin/bash.bashrc and ~/.bashrc successfully') -def test_run_commond(): - client = RuntimeClient() - command = CmdRunAction(command="ls -l") - obs = client.run_action(command) - print(obs) - command = CmdRunAction(command="pwd") - obs = client.run_action(command) - print(obs) +# def test_run_commond(): +# client = RuntimeClient() +# command = CmdRunAction(command='ls -l') +# obs = client.run_action(command) +# print(obs) +# def test_shell(message): +# shell = pexpect.spawn('/bin/bash', encoding='utf-8') +# shell.expect(r'[$#] ') +# print(f'Received command: {message}') +# shell.sendline(message) +# shell.expect(r'[$#] ') +# output = shell.before.strip().split('\r\n', 1)[1].strip() +# print(f'Output: {output}') +# shell.close() -def test_shell(message): - shell = pexpect.spawn('/bin/bash', encoding='utf-8') - shell.expect(r'[$#] ') - print(f"Received command: {message}") - shell.sendline(message) - shell.expect(r'[$#] ') - output = shell.before.strip().split('\r\n', 1)[1].strip() - shell.close() - print(output) +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('port', type=int, help='Port to listen on') + parser.add_argument('--working-dir', type=str, help='Working directory') + parser.add_argument('--plugins', type=str, help='Plugins to initialize', nargs='+') + # example: python client.py 8000 --working-dir 
/workspace --plugins JupyterRequirement + args = parser.parse_args() -if __name__ == "__main__": - # print(test_shell("ls -l")) - # client = RuntimeClient() - test_run_commond() - # client.init_sandbox_plugins([AgentSkillsRequirement,JupyterRequirement]) - # print(test_shell("whoami")) + plugins_to_load: list[Plugin] = [] + if args.plugins: + for plugin in args.plugins: + if plugin not in ALL_PLUGINS: + raise ValueError(f'Plugin {plugin} not found') + plugins_to_load.append(ALL_PLUGINS[plugin]()) # type: ignore - \ No newline at end of file + client = RuntimeClient(plugins_to_load, work_dir=args.working_dir) + + @app.middleware('http') + async def one_request_at_a_time(request: Request, call_next): + async with client.lock: + response = await call_next(request) + return response + + @app.post('/execute_action') + async def execute_action(action_request: ActionRequest): + try: + action = event_from_dict(action_request.action) + if not isinstance(action, Action): + raise HTTPException(status_code=400, detail='Invalid action type') + observation = await client.run_action(action) + return event_to_dict(observation) + except Exception as e: + logger.error(f'Error processing command: {str(e)}') + raise HTTPException(status_code=500, detail=str(e)) + + @app.get('/alive') + async def alive(): + return {'status': 'ok'} + + logger.info(f'Starting action execution API on port {args.port}') + print(f'Starting action execution API on port {args.port}') + run(app, host='0.0.0.0', port=args.port) diff --git a/opendevin/runtime/client/runtime.py b/opendevin/runtime/client/runtime.py index 4f651e33a1..a2e38713a9 100644 --- a/opendevin/runtime/client/runtime.py +++ b/opendevin/runtime/client/runtime.py @@ -1,25 +1,15 @@ -from typing import Any import asyncio -import json -import websockets -import docker +import atexit import uuid -from opendevin.events.serialization.action import ACTION_TYPE_TO_CLASS -from opendevin.events.action.action import Action -from opendevin.events.event 
import Event -from opendevin.events.observation import Observation -from opendevin.events.stream import EventStream -from opendevin.events.serialization import event_to_dict, observation_from_dict -from opendevin.runtime.runtime import Runtime -from opendevin.runtime.server.browse import browse -from opendevin.runtime.server.files import read_file, write_file -from opendevin.runtime.plugins import PluginRequirement +from typing import Optional + +import aiohttp +import docker +import tenacity + from opendevin.core.config import config -from opendevin.events.observation import ( - ErrorObservation, - NullObservation, - Observation, -) +from opendevin.core.logger import opendevin_logger as logger +from opendevin.events import EventSource, EventStream, EventStreamSubscriber from opendevin.events.action import ( AgentRecallAction, BrowseInteractiveAction, @@ -29,83 +19,156 @@ from opendevin.events.action import ( FileWriteAction, IPythonRunCellAction, ) -import asyncio -from opendevin.events import EventSource, EventStream, EventStreamSubscriber +from opendevin.events.action.action import Action +from opendevin.events.event import Event +from opendevin.events.observation import ( + ErrorObservation, + NullObservation, + Observation, +) +from opendevin.events.serialization import event_to_dict, observation_from_dict +from opendevin.events.serialization.action import ACTION_TYPE_TO_CLASS +from opendevin.runtime.plugins import ( + AgentSkillsRequirement, + JupyterRequirement, + PluginRequirement, +) +from opendevin.runtime.runtime import Runtime +from opendevin.runtime.utils import find_available_tcp_port +from opendevin.runtime.utils.image_agnostic import get_od_sandbox_image + class EventStreamRuntime(Runtime): # This runtime will subscribe the event stream # When receive an event, it will send the event to od-runtime-client which run inside the docker environment - - # websocket uri - uri = 'ws://localhost:8080' + container_name_prefix = 'opendevin-sandbox-' - 
docker_client: docker.DockerClient - def __init__(self, event_stream: EventStream, sid: str = 'default',container_image: str | None = None): - # We don't need sandbox in this runtime, because it's equal to a websocket sandbox - self._init_event_stream(event_stream) - self._init_websocket() - self._init_docker(sid,container_image) + def __init__( + self, + event_stream: EventStream, + sid: str = 'default', + container_image: str | None = None, + plugins: list[PluginRequirement] | None = None, + ): + self._port = find_available_tcp_port() + self.api_url = f'http://localhost:{self._port}' + self.session: Optional[aiohttp.ClientSession] = None - def _init_docker(self,sid,container_image): - self.container_image = container_image - # ( - # config.sandbox_container_image - # if container_image is None - # else container_image - # ) self.instance_id = ( sid + str(uuid.uuid4()) if sid is not None else str(uuid.uuid4()) ) + self.docker_client: docker.DockerClient = self._init_docker_client() + self.container_image = ( + config.sandbox.container_image + if container_image is None + else container_image + ) + self.container_image = get_od_sandbox_image( + self.container_image, self.docker_client, is_eventstream_runtime=True + ) self.container_name = self.container_name_prefix + self.instance_id + atexit.register(self.close) + + # We don't need sandbox in this runtime, because it's equal to a websocket sandbox + self._init_event_stream(event_stream) + self.plugins = plugins if plugins is not None else [] + self.container = self._init_container( + self.sandbox_workspace_dir, + mount_dir=config.workspace_mount_path, + plugins=plugins, + ) + + @staticmethod + def _init_docker_client() -> docker.DockerClient: try: - self.docker_client = docker.from_env() - self._init_sandbox() + return docker.from_env() except Exception as ex: - print( - "Launch docker client failed. Please make sure you have installed docker and started the docker daemon." 
+ logger.error( + 'Launch docker client failed. Please make sure you have installed docker and started the docker daemon.' ) raise ex - - def _init_event_stream(self,event_stream: EventStream): + + @tenacity.retry( + stop=tenacity.stop_after_attempt(5), + wait=tenacity.wait_exponential(multiplier=1, min=4, max=60), + ) + def _init_container( + self, + sandbox_workspace_dir: str, + mount_dir: str = config.workspace_mount_path, + plugins: list[PluginRequirement] | None = None, + ): + """Start a container and return the container object. + + Args: + mount_dir: str: The directory (on host machine) to mount to the container + sandbox_workspace_dir: str: working directory in the container, also the target directory for the mount + """ + + try: + # start the container + logger.info( + f'Starting container with image: {self.container_image} and name: {self.container_name}' + ) + if plugins is None: + plugins = [] + plugin_names = ' '.join([plugin.name for plugin in plugins]) + container = self.docker_client.containers.run( + self.container_image, + command=( + f'/opendevin/miniforge3/bin/mamba run --no-capture-output -n base ' + 'PYTHONUNBUFFERED=1 poetry run ' + f'python -u -m opendevin.runtime.client.client {self._port} ' + f'--working-dir {sandbox_workspace_dir} ' + f'--plugins {plugin_names} ' + ), + # TODO: test it in mac and linux + network_mode='host', + working_dir='/opendevin/code/', + name=self.container_name, + detach=True, + volumes={mount_dir: {'bind': sandbox_workspace_dir, 'mode': 'rw'}}, + ) + logger.info(f'Container started. 
Server url: {self.api_url}') + return container + except Exception as e: + logger.error('Failed to start container') + logger.exception(e) + self.close(close_client=False) + raise e + + def _init_event_stream(self, event_stream: EventStream): self.event_stream = event_stream self.event_stream.subscribe(EventStreamSubscriber.RUNTIME, self.on_event) - def _init_websocket(self): - self.websocket = None - # TODO: need to initialization globally only once - # self.loop = asyncio.new_event_loop() - # asyncio.set_event_loop(self.loop) - # self.loop.run_until_complete(self._init_websocket_connect()) - - async def _init_websocket_connect(self): - self.websocket = await websockets.connect(self.uri) - - def _init_sandbox(self): - try: - # start the container - mount_dir = config.workspace_mount_path - self.container = self.docker_client.containers.run( - self.container_image, - command='tail -f /dev/null', - # TODO: test the port mapping in mac and linux - # network_mode='host', - working_dir=self.sandbox_workspace_dir, - name=self.container_name, - detach=True, - ports={'8080/tcp': 8080}, - volumes={mount_dir: {'bind': self.sandbox_workspace_dir, 'mode': 'rw'}}, - ) - print('Container started') - except Exception as e: - print('Failed to start container') - raise e + async def _ensure_session(self): + if self.session is None or self.session.closed: + self.session = aiohttp.ClientSession() + return self.session + + @tenacity.retry( + stop=tenacity.stop_after_attempt(5), + wait=tenacity.wait_exponential(multiplier=2, min=4, max=600), + ) + async def _wait_until_alive(self): + async with aiohttp.ClientSession() as session: + async with session.get(f'{self.api_url}/alive') as response: + if response.status == 200: + return + else: + logger.error( + f'Action execution API is not alive. Response: {response}' + ) + raise RuntimeError( + f'Action execution API is not alive. 
Response: {response}' + ) @property def sandbox_workspace_dir(self): return config.workspace_mount_path_in_sandbox - def close(self): + def close(self, close_client: bool = True): containers = self.docker_client.containers.list(all=True) for container in containers: try: @@ -113,18 +176,19 @@ class EventStreamRuntime(Runtime): container.remove(force=True) except docker.errors.NotFound: pass - self.docker_client.close() - + if close_client: + self.docker_client.close() + async def on_event(self, event: Event) -> None: - print("EventStreamRuntime: on_event triggered") + print('EventStreamRuntime: on_event triggered') if isinstance(event, Action): observation = await self.run_action(event) - print("EventStreamRuntime: observation", observation) + print('EventStreamRuntime: observation', observation) # observation._cause = event.id # type: ignore[attr-defined] source = event.source if event.source else EventSource.AGENT await self.event_stream.add_event(observation, source) - - async def run_action(self, action: Action) -> Observation: + + async def run_action(self, action: Action, timeout: int = 600) -> Observation: """ Run an action and return the resulting observation. If the action is not runnable in any runtime, a NullObservation is returned. @@ -140,66 +204,92 @@ class EventStreamRuntime(Runtime): return ErrorObservation( f'Action {action_type} is not supported in the current runtime.' 
) - observation = await getattr(self, action_type)(action) - # TODO: fix ID problem, see comments https://github.com/OpenDevin/OpenDevin/pull/2603#discussion_r1668994137 - observation._parent = action.id # type: ignore[attr-defined] - return observation - - async def run(self, action: CmdRunAction) -> Observation: - return await self._run_command(action) - - async def _run_command( - self, action: Action, _stream: bool = False, timeout: int | None = None - ) -> Observation: - # Send action into websocket and get the result - # TODO: need to initialization globally only once - self.websocket = await websockets.connect(self.uri) - if self.websocket is None: - raise Exception("WebSocket is not connected.") + + # Run action in od-runtime-client + session = await self._ensure_session() + await self._wait_until_alive() try: - await self.websocket.send(json.dumps(event_to_dict(action))) - output = await asyncio.wait_for(self.websocket.recv(), timeout=timeout) - output = json.loads(output) - print("Received output: ", output) + async with session.post( + f'{self.api_url}/execute_action', + json={'action': event_to_dict(action)}, + timeout=timeout, + ) as response: + if response.status == 200: + output = await response.json() + obs = observation_from_dict(output) + obs._cause = action.id # type: ignore[attr-defined] + return obs + else: + error_message = await response.text() + logger.error(f'Error from server: {error_message}') + obs = ErrorObservation(f'Command execution failed: {error_message}') except asyncio.TimeoutError: - print("No response received within the timeout period.") - await self.websocket.close() - return observation_from_dict(output) - + logger.error('No response received within the timeout period.') + obs = ErrorObservation('Command execution timed out') + except Exception as e: + logger.error(f'Error during command execution: {e}') + obs = ErrorObservation(f'Command execution failed: {str(e)}') + # TODO: fix ID problem, see comments 
https://github.com/OpenDevin/OpenDevin/pull/2603#discussion_r1668994137 + obs._parent = action.id # type: ignore[attr-defined] + return obs + + async def run(self, action: CmdRunAction) -> Observation: + return await self.run_action(action) + async def run_ipython(self, action: IPythonRunCellAction) -> Observation: - return await self._run_command(action) + return await self.run_action(action) - ############################################################################ - # Keep the same with other runtimes - ############################################################################ - - def get_working_directory(self): - # TODO: should we get this from od-runtime-client - return config.workspace_base - async def read(self, action: FileReadAction) -> Observation: - working_dir = self.get_working_directory() - return await read_file(action.path, working_dir, action.start, action.end) - + return await self.run_action(action) + async def write(self, action: FileWriteAction) -> Observation: - working_dir = self.get_working_directory() - return await write_file( - action.path, working_dir, action.content, action.start, action.end - ) - + return await self.run_action(action) + async def browse(self, action: BrowseURLAction) -> Observation: - return await browse(action, self.browser) + return await self.run_action(action) async def browse_interactive(self, action: BrowseInteractiveAction) -> Observation: - return await browse(action, self.browser) + return await self.run_action(action) async def recall(self, action: AgentRecallAction) -> Observation: - return NullObservation('') + return await self.run_action(action) - ############################################################################ + ############################################################################ + # Keep the same with other runtimes + ############################################################################ + + def get_working_directory(self): + # FIXME: this is not needed for the agent - 
we keep this + # method to be consistent with the other runtimes + # but eventually we will remove this method across all runtimes + # when we use EventStreamRuntime to replace the other sandbox-based runtime + raise NotImplementedError( + 'This method is not implemented in the runtime client.' + ) + + # async def read(self, action: FileReadAction) -> Observation: + # working_dir = self.get_working_directory() + # return await read_file(action.path, working_dir, action.start, action.end) + + # async def write(self, action: FileWriteAction) -> Observation: + # working_dir = self.get_working_directory() + # return await write_file( + # action.path, working_dir, action.content, action.start, action.end + # ) + + # async def browse(self, action: BrowseURLAction) -> Observation: + # return await browse(action, self.browser) + + # async def browse_interactive(self, action: BrowseInteractiveAction) -> Observation: + # return await browse(action, self.browser) + + # async def recall(self, action: AgentRecallAction) -> Observation: + # return NullObservation('') + + ############################################################################ # Initialization work inside sandbox image - ############################################################################ - + ############################################################################ + # init_runtime_tools direcctly do as what Runtime do # Do in the od_runtime_client @@ -208,53 +298,72 @@ class EventStreamRuntime(Runtime): pass - def test_run_command(): - sid = "test" + sid = 'test' cli_session = 'main' + ('_' + sid if sid else '') event_stream = EventStream(cli_session) runtime = EventStreamRuntime(event_stream) - asyncio.run(runtime._run_command(CmdRunAction('ls -l'))) + asyncio.run(runtime.run_action(CmdRunAction('ls -l'))) + async def test_event_stream(): - sid = "test" + sid = 'test' cli_session = 'main' + ('_' + sid if sid else '') event_stream = EventStream(cli_session) - runtime = 
EventStreamRuntime(event_stream) + runtime = EventStreamRuntime( + event_stream, + sid, + 'ubuntu:22.04', + plugins=[JupyterRequirement(), AgentSkillsRequirement()], + ) # Test run command action_cmd = CmdRunAction(command='ls -l') - print(await runtime.run_action(action_cmd)) + logger.info(action_cmd, extra={'msg_type': 'ACTION'}) + logger.info(await runtime.run_action(action_cmd), extra={'msg_type': 'OBSERVATION'}) # Test run ipython - test_code = "print('Hello, `World`!\n')" - action_opython = IPythonRunCellAction(code=test_code) - print(await runtime.run_action(action_opython)) + test_code = "print('Hello, `World`!\\n')" + action_ipython = IPythonRunCellAction(code=test_code) + logger.info(action_ipython, extra={'msg_type': 'ACTION'}) + logger.info( + await runtime.run_action(action_ipython), extra={'msg_type': 'OBSERVATION'} + ) - # Test read file + # Test read file (file should not exist) action_read = FileReadAction(path='hello.sh') - print(await runtime.run_action(action_read)) + logger.info(action_read, extra={'msg_type': 'ACTION'}) + logger.info( + await runtime.run_action(action_read), extra={'msg_type': 'OBSERVATION'} + ) # Test write file action_write = FileWriteAction(content='echo "Hello, World!"', path='hello.sh') - print(await runtime.run_action(action_write)) + logger.info(action_write, extra={'msg_type': 'ACTION'}) + logger.info( + await runtime.run_action(action_write), extra={'msg_type': 'OBSERVATION'} + ) + + # Test read file (file should exist) + action_read = FileReadAction(path='hello.sh') + logger.info(action_read, extra={'msg_type': 'ACTION'}) + logger.info( + await runtime.run_action(action_read), extra={'msg_type': 'OBSERVATION'} + ) # Test browse action_browse = BrowseURLAction(url='https://google.com') - print(await runtime.run_action(action_browse)) + logger.info(action_browse, extra={'msg_type': 'ACTION'}) + logger.info( + await runtime.run_action(action_browse), extra={'msg_type': 'OBSERVATION'} + ) # Test recall action_recall = 
AgentRecallAction(query='who am I?') - print(await runtime.run_action(action_recall)) + logger.info(action_recall, extra={'msg_type': 'ACTION'}) + logger.info( + await runtime.run_action(action_recall), extra={'msg_type': 'OBSERVATION'} + ) -def test_docker_launch(): - sid = "test" - cli_session = 'main' + ('_' + sid if sid else '') - event_stream = EventStream(cli_session) - runtime = EventStreamRuntime(event_stream,sid,"ghcr.io/opendevin/sandbox:main") - runtime.close() -if __name__ == "__main__": +if __name__ == '__main__': asyncio.run(test_event_stream()) - - - \ No newline at end of file diff --git a/opendevin/runtime/plugins/__init__.py b/opendevin/runtime/plugins/__init__.py index a32f9972aa..9c4e01cc8a 100644 --- a/opendevin/runtime/plugins/__init__.py +++ b/opendevin/runtime/plugins/__init__.py @@ -1,14 +1,22 @@ # Requirements -from .agent_skills import AgentSkillsRequirement -from .jupyter import JupyterRequirement +from .agent_skills import AgentSkillsPlugin, AgentSkillsRequirement +from .jupyter import JupyterPlugin, JupyterRequirement from .mixin import PluginMixin -from .requirement import PluginRequirement +from .requirement import Plugin, PluginRequirement from .swe_agent_commands import SWEAgentCommandsRequirement __all__ = [ + 'Plugin', 'PluginMixin', 'PluginRequirement', 'AgentSkillsRequirement', + 'AgentSkillsPlugin', 'JupyterRequirement', + 'JupyterPlugin', 'SWEAgentCommandsRequirement', ] + +ALL_PLUGINS = { + 'jupyter': JupyterPlugin, + 'agent_skills': AgentSkillsPlugin, +} diff --git a/opendevin/runtime/plugins/agent_skills/__init__.py b/opendevin/runtime/plugins/agent_skills/__init__.py index 1df5a3046b..8b1a3c7335 100644 --- a/opendevin/runtime/plugins/agent_skills/__init__.py +++ b/opendevin/runtime/plugins/agent_skills/__init__.py @@ -2,7 +2,7 @@ import os from dataclasses import dataclass from opendevin.runtime.plugins.agent_skills.agentskills import DOCUMENTATION -from opendevin.runtime.plugins.requirement import PluginRequirement +from 
opendevin.runtime.plugins.requirement import Plugin, PluginRequirement @dataclass @@ -14,3 +14,7 @@ class AgentSkillsRequirement(PluginRequirement): sandbox_dest: str = '/opendevin/plugins/agent_skills' bash_script_path: str = 'setup.sh' documentation: str = DOCUMENTATION + + +class AgentSkillsPlugin(Plugin): + name: str = 'agent_skills' diff --git a/opendevin/runtime/plugins/jupyter/__init__.py b/opendevin/runtime/plugins/jupyter/__init__.py index c6af007446..1178a95fd0 100644 --- a/opendevin/runtime/plugins/jupyter/__init__.py +++ b/opendevin/runtime/plugins/jupyter/__init__.py @@ -1,7 +1,14 @@ import os +import subprocess +import time from dataclasses import dataclass -from opendevin.runtime.plugins.requirement import PluginRequirement +from opendevin.events.action import Action, IPythonRunCellAction +from opendevin.events.observation import IPythonRunCellObservation, Observation +from opendevin.runtime.plugins.requirement import Plugin, PluginRequirement +from opendevin.runtime.utils import find_available_tcp_port + +from .execute_server import JupyterKernel @dataclass @@ -12,3 +19,57 @@ class JupyterRequirement(PluginRequirement): ) # The directory of this file (opendevin/runtime/plugins/jupyter) sandbox_dest: str = '/opendevin/plugins/jupyter' bash_script_path: str = 'setup.sh' + + # ================================================================ + # Plugin methods, which will ONLY be used in the runtime client + # running inside docker + # ================================================================ + + +class JupyterPlugin(Plugin): + name: str = 'jupyter' + + def initialize(self, kernel_id: str = 'opendevin-default'): + self.kernel_gateway_port = find_available_tcp_port() + self.kernel_id = kernel_id + self.gateway_process = subprocess.Popen( + [ + '/opendevin/miniforge3/bin/mamba', + 'run', + '-n', + 'base', + 'poetry', + 'run', + 'jupyter', + 'kernelgateway', + '--KernelGatewayApp.ip=0.0.0.0', + f'--KernelGatewayApp.port={self.kernel_gateway_port}', 
+ ], + stderr=subprocess.STDOUT, + ) + # read stdout until the kernel gateway is ready + while True and self.gateway_process.stdout is not None: + line = self.gateway_process.stdout.readline().decode('utf-8') + if 'at' in line: + break + time.sleep(1) + print('Waiting for jupyter kernel gateway to start...') + + async def run(self, action: Action) -> Observation: + if not isinstance(action, IPythonRunCellAction): + raise ValueError( + f'Jupyter plugin only supports IPythonRunCellAction, but got {action}' + ) + + if not hasattr(self, 'kernel'): + self.kernel = JupyterKernel( + f'localhost:{self.kernel_gateway_port}', self.kernel_id + ) + + if not self.kernel.initialized: + await self.kernel.initialize() + output = await self.kernel.execute(action.code) + return IPythonRunCellObservation( + content=output, + code=action.code, + ) diff --git a/opendevin/runtime/plugins/jupyter/execute_server b/opendevin/runtime/plugins/jupyter/execute_server.py similarity index 98% rename from opendevin/runtime/plugins/jupyter/execute_server rename to opendevin/runtime/plugins/jupyter/execute_server.py index 4560fa4a07..d4806e5497 100755 --- a/opendevin/runtime/plugins/jupyter/execute_server +++ b/opendevin/runtime/plugins/jupyter/execute_server.py @@ -64,6 +64,7 @@ class JupyterKernel: self.heartbeat_interval = 10000 # 10 seconds self.heartbeat_callback = None + self.initialized = False async def initialize(self): await self.execute(r'%colors nocolor') @@ -76,6 +77,7 @@ class JupyterKernel: for tool in self.tools_to_run: # logging.info(f'Tool initialized:\n{tool}') await self.execute(tool) + self.initialized = True async def _send_heartbeat(self): if not self.ws: @@ -139,6 +141,7 @@ class JupyterKernel: await self._connect() msg_id = uuid4().hex + assert self.ws is not None self.ws.write_message( json_encode( { @@ -169,6 +172,7 @@ class JupyterKernel: async def wait_for_messages(): execution_done = False while not execution_done: + assert self.ws is not None msg = await 
self.ws.read_message() msg = json_decode(msg) msg_type = msg['msg_type'] diff --git a/opendevin/runtime/plugins/jupyter/setup.sh b/opendevin/runtime/plugins/jupyter/setup.sh index e54649303e..87b4d30be5 100755 --- a/opendevin/runtime/plugins/jupyter/setup.sh +++ b/opendevin/runtime/plugins/jupyter/setup.sh @@ -64,7 +64,7 @@ echo "JupyterKernelGateway started with PID: $JUPYTER_GATEWAY_PID" # Start the jupyter_server export JUPYTER_EXEC_SERVER_PORT=$(find_free_port 30000 40000) echo "export JUPYTER_EXEC_SERVER_PORT=$JUPYTER_EXEC_SERVER_PORT" >> ~/.bashrc -$OPENDEVIN_PYTHON_INTERPRETER /opendevin/plugins/jupyter/execute_server > /opendevin/logs/jupyter_execute_server.log 2>&1 & +$OPENDEVIN_PYTHON_INTERPRETER /opendevin/plugins/jupyter/execute_server.py > /opendevin/logs/jupyter_execute_server.log 2>&1 & export JUPYTER_EXEC_SERVER_PID=$! echo "export JUPYTER_EXEC_SERVER_PID=$JUPYTER_EXEC_SERVER_PID" >> ~/.bashrc echo "Execution server started with PID: $JUPYTER_EXEC_SERVER_PID" diff --git a/opendevin/runtime/plugins/requirement.py b/opendevin/runtime/plugins/requirement.py index c4ade58849..3971dad8c1 100644 --- a/opendevin/runtime/plugins/requirement.py +++ b/opendevin/runtime/plugins/requirement.py @@ -1,5 +1,28 @@ +from abc import abstractmethod from dataclasses import dataclass +from opendevin.events.action import Action +from opendevin.events.observation import Observation + + +class Plugin: + """Base class for a plugin. + + This will be initialized by the runtime client, which will run inside docker. 
+ """ + + name: str + + @abstractmethod + def initialize(self): + """Initialize the plugin.""" + pass + + @abstractmethod + async def run(self, action: Action) -> Observation: + """Run the plugin for a given action.""" + pass + @dataclass class PluginRequirement: diff --git a/opendevin/runtime/utils/image_agnostic.py b/opendevin/runtime/utils/image_agnostic.py index a48fb4579d..7775b67287 100644 --- a/opendevin/runtime/utils/image_agnostic.py +++ b/opendevin/runtime/utils/image_agnostic.py @@ -1,11 +1,14 @@ +import os import tempfile import docker from opendevin.core.logger import opendevin_logger as logger +from .source import create_project_source_dist -def generate_dockerfile_content(base_image: str) -> str: + +def generate_dockerfile(base_image: str) -> str: """ Generate the Dockerfile content for the agnostic sandbox image based on user-provided base image. @@ -32,28 +35,116 @@ def generate_dockerfile_content(base_image: str) -> str: return dockerfile_content +def generate_dockerfile_for_eventstream_runtime( + base_image: str, temp_dir: str, skip_init: bool = False +) -> str: + """ + Generate the Dockerfile content for the eventstream runtime image based on user-provided base image. + + NOTE: This is only tested on debian yet. + """ + if skip_init: + dockerfile_content = f'FROM {base_image}\n' + else: + dockerfile_content = ( + f'FROM {base_image}\n' + 'RUN apt update && apt install -y wget sudo\n' + 'RUN mkdir -p /opendevin && mkdir -p /opendevin/logs && chmod 777 /opendevin/logs\n' + 'RUN echo "" > /opendevin/bash.bashrc\n' + 'RUN if [ ! -d /opendevin/miniforge3 ]; then \\\n' + ' wget --progress=bar:force -O Miniforge3.sh "https://github.com/conda-forge/miniforge/releases/latest/download/Miniforge3-$(uname)-$(uname -m).sh" && \\\n' + ' bash Miniforge3.sh -b -p /opendevin/miniforge3 && \\\n' + ' rm Miniforge3.sh && \\\n' + ' chmod -R g+w /opendevin/miniforge3 && \\\n' + ' bash -c ". 
/opendevin/miniforge3/etc/profile.d/conda.sh && conda config --set changeps1 False && conda config --append channels conda-forge"; \\\n' +            '    fi\n' +            'RUN /opendevin/miniforge3/bin/mamba install python=3.11\n' +            'RUN /opendevin/miniforge3/bin/mamba install conda-forge::poetry\n' +        ) + +    tarball_path = create_project_source_dist() +    filename = os.path.basename(tarball_path) +    filename = filename.removesuffix('.tar.gz') + +    # move the tarball to temp_dir +    os.rename(tarball_path, os.path.join(temp_dir, 'project.tar.gz')) +    logger.info( +        f'Source distribution moved to {os.path.join(temp_dir, "project.tar.gz")}' +    ) + +    # Copy the project directory to the container +    dockerfile_content += 'COPY project.tar.gz /opendevin\n' +    # remove /opendevin/code if it exists +    dockerfile_content += ( +        'RUN if [ -d /opendevin/code ]; then rm -rf /opendevin/code; fi\n' +    ) +    # unzip the tarball to /opendevin/code +    dockerfile_content += ( +        'RUN cd /opendevin && tar -xzvf project.tar.gz && rm project.tar.gz\n' +    ) +    dockerfile_content += f'RUN mv /opendevin/{filename} /opendevin/code\n' +    # install (or update) the dependencies +    dockerfile_content += ( +        'RUN cd /opendevin/code && ' +        '/opendevin/miniforge3/bin/mamba run -n base poetry env use python3.11 && ' +        '/opendevin/miniforge3/bin/mamba run -n base poetry install\n' +    ) +    return dockerfile_content + + def _build_sandbox_image( -    base_image: str, target_image_name: str, docker_client: docker.DockerClient +    base_image: str, +    target_image_name: str, +    docker_client: docker.DockerClient, +    eventstream_runtime: bool = False, +    skip_init: bool = False, ): try: with tempfile.TemporaryDirectory() as temp_dir: -            dockerfile_content = generate_dockerfile_content(base_image) -            logger.info(f'Building agnostic sandbox image: {target_image_name}') -            logger.info( -                ( -                    f'===== Dockerfile content =====\n' -                    f'{dockerfile_content}\n' -                    f'===============================' +            if eventstream_runtime: +                dockerfile_content = 
generate_dockerfile_for_eventstream_runtime( + base_image, temp_dir, skip_init=skip_init + ) + else: + dockerfile_content = generate_dockerfile(base_image) + + if skip_init: + logger.info( + f'Reusing existing od_sandbox image [{target_image_name}] but will update the source code in it.' + ) + logger.info( + ( + f'===== Dockerfile content =====\n' + f'{dockerfile_content}\n' + f'===============================' + ) + ) + else: + logger.info(f'Building agnostic sandbox image: {target_image_name}') + logger.info( + ( + f'===== Dockerfile content =====\n' + f'{dockerfile_content}\n' + f'===============================' + ) ) - ) with open(f'{temp_dir}/Dockerfile', 'w') as file: file.write(dockerfile_content) api_client = docker_client.api build_logs = api_client.build( - path=temp_dir, tag=target_image_name, rm=True, decode=True + path=temp_dir, + tag=target_image_name, + rm=True, + decode=True, + # do not use cache when skip_init is True (i.e., when we want to update the source code in the existing image) + nocache=skip_init, ) + if skip_init: + logger.info( + f'Rebuilding existing od_sandbox image [{target_image_name}] to update the source code.' 
+ ) for log in build_logs: if 'stream' in log: print(log['stream'].strip()) @@ -71,16 +162,23 @@ def _build_sandbox_image( raise e -def _get_new_image_name(base_image: str) -> str: +def _get_new_image_name(base_image: str, is_eventstream_runtime: bool) -> str: + prefix = 'od_sandbox' + if is_eventstream_runtime: + prefix = 'od_eventstream_runtime' if ':' not in base_image: base_image = base_image + ':latest' [repo, tag] = base_image.split(':') repo = repo.replace('/', '___') - return f'od_sandbox:{repo}__{tag}' + return f'{prefix}:{repo}__{tag}' -def get_od_sandbox_image(base_image: str, docker_client: docker.DockerClient) -> str: +def get_od_sandbox_image( + base_image: str, + docker_client: docker.DockerClient, + is_eventstream_runtime: bool = False, +) -> str: """Return the sandbox image name based on user-provided base image. The returned sandbox image is assumed to contains all the required dependencies for OpenDevin. @@ -90,18 +188,37 @@ def get_od_sandbox_image(base_image: str, docker_client: docker.DockerClient) -> if 'ghcr.io/opendevin/sandbox' in base_image: return base_image - new_image_name = _get_new_image_name(base_image) + new_image_name = _get_new_image_name(base_image, is_eventstream_runtime) # Detect if the sandbox image is built + image_exists = False images = docker_client.images.list() for image in images: if new_image_name in image.tags: logger.info('Found existing od_sandbox image, reuse:' + new_image_name) - return new_image_name + image_exists = True + break - # If the sandbox image is not found, build it - logger.info( - f'od_sandbox image is not found for {base_image}, will build: {new_image_name}' + skip_init = False + if image_exists: + if is_eventstream_runtime: + skip_init = True + base_image = new_image_name + logger.info( + f'Reusing existing od_sandbox image [{new_image_name}] but will update the source code.' 
+ ) + else: + return new_image_name + else: + # If the sandbox image is not found, build it + logger.info( + f'od_sandbox image is not found for {base_image}, will build: {new_image_name}' + ) + _build_sandbox_image( + base_image, + new_image_name, + docker_client, + is_eventstream_runtime, + skip_init=skip_init, ) - _build_sandbox_image(base_image, new_image_name, docker_client) return new_image_name diff --git a/opendevin/runtime/utils/source.py b/opendevin/runtime/utils/source.py new file mode 100644 index 0000000000..1cae2b38f1 --- /dev/null +++ b/opendevin/runtime/utils/source.py @@ -0,0 +1,32 @@ +import os +import subprocess +from importlib.metadata import version + +import opendevin +from opendevin.core.logger import opendevin_logger as logger + + +def create_project_source_dist(): + """Create a source distribution of the project. Return the path to the tarball.""" + + # Copy the project directory to the container + # get the location of "opendevin" package + project_root = os.path.dirname(os.path.dirname(os.path.abspath(opendevin.__file__))) + logger.info(f'Using project root: {project_root}') + + # run "python -m build -s" on project_root + result = subprocess.run(['python', '-m', 'build', '-s', project_root]) + if result.returncode != 0: + logger.error(f'Build failed: {result}') + raise Exception(f'Build failed: {result}') + logger.info(f'Source distribution create result: {result}') + + tarball_path = os.path.join( + project_root, 'dist', f'opendevin-{version("opendevin")}.tar.gz' + ) + if not os.path.exists(tarball_path): + logger.error(f'Source distribution not found at {tarball_path}') + raise Exception(f'Source distribution not found at {tarball_path}') + logger.info(f'Source distribution created at {tarball_path}') + + return tarball_path diff --git a/opendevin/server/session/agent.py b/opendevin/server/session/agent.py index a5e614f144..54b7353ba4 100644 --- a/opendevin/server/session/agent.py +++ b/opendevin/server/session/agent.py @@ -9,10 +9,7 
@@ from opendevin.core.logger import opendevin_logger as logger from opendevin.core.schema import ConfigType from opendevin.events.stream import EventStream from opendevin.llm.llm import LLM -from opendevin.runtime import DockerSSHBox -from opendevin.runtime.e2b.runtime import E2BRuntime -from opendevin.runtime.runtime import Runtime -from opendevin.runtime.server.runtime import ServerRuntime +from opendevin.runtime import DockerSSHBox, Runtime, get_runtime_cls class AgentSession: @@ -60,16 +57,10 @@ class AgentSession: async def _create_runtime(self): if self.runtime is not None: raise Exception('Runtime already created') - if config.runtime == 'server': - logger.info('Using server runtime') - self.runtime = ServerRuntime(self.event_stream, self.sid) - elif config.runtime == 'e2b': - logger.info('Using E2B runtime') - self.runtime = E2BRuntime(self.event_stream, self.sid) - else: - raise Exception( - f'Runtime not defined in config, or is invalid: {config.runtime}' - ) + + logger.info(f'Using runtime: {config.runtime}') + runtime_cls = get_runtime_cls(config.runtime) + self.runtime = runtime_cls(self.event_stream, self.sid) async def _create_controller(self, start_event: dict): """Creates an AgentController instance. diff --git a/poetry.lock b/poetry.lock index d4b52243bb..cbe3bdcf5c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. 
[[package]] name = "aenum" @@ -187,6 +187,17 @@ doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphin test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] trio = ["trio (>=0.23)"] +[[package]] +name = "appnope" +version = "0.1.4" +description = "Disable App Nap on macOS >= 10.9" +optional = false +python-versions = ">=3.6" +files = [ + {file = "appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c"}, + {file = "appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee"}, +] + [[package]] name = "argon2-cffi" version = "23.1.0" @@ -244,6 +255,25 @@ cffi = ">=1.0.1" dev = ["cogapp", "pre-commit", "pytest", "wheel"] tests = ["pytest"] +[[package]] +name = "arrow" +version = "1.3.0" +description = "Better dates & times for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "arrow-1.3.0-py3-none-any.whl", hash = "sha256:c728b120ebc00eb84e01882a6f5e7927a53960aa990ce7dd2b10f39005a67f80"}, + {file = "arrow-1.3.0.tar.gz", hash = "sha256:d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85"}, +] + +[package.dependencies] +python-dateutil = ">=2.7.0" +types-python-dateutil = ">=2.8.10" + +[package.extras] +doc = ["doc8", "sphinx (>=7.0.0)", "sphinx-autobuild", "sphinx-autodoc-typehints", "sphinx_rtd_theme (>=1.3.0)"] +test = ["dateparser (==1.*)", "pre-commit", "pytest", "pytest-cov", "pytest-mock", "pytz (==2021.1)", "simplejson (==3.*)"] + [[package]] name = "asgiref" version = "3.8.1" @@ -258,6 +288,35 @@ files = [ [package.extras] tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] +[[package]] +name = "asttokens" +version = "2.4.1" +description = "Annotate AST trees with source code positions" +optional = false +python-versions = "*" +files = [ + {file = 
"asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, + {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, +] + +[package.dependencies] +six = ">=1.12.0" + +[package.extras] +astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] +test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] + +[[package]] +name = "async-lru" +version = "2.0.4" +description = "Simple LRU cache for asyncio" +optional = false +python-versions = ">=3.8" +files = [ + {file = "async-lru-2.0.4.tar.gz", hash = "sha256:b8a59a5df60805ff63220b2a0c5b5393da5521b113cd5465a44eb037d81a5627"}, + {file = "async_lru-2.0.4-py3-none-any.whl", hash = "sha256:ff02944ce3c288c5be660c42dbcca0742b32c3b279d6dceda655190240b99224"}, +] + [[package]] name = "attrs" version = "23.2.0" @@ -314,6 +373,20 @@ msal = ">=1.24.0" msal-extensions = ">=0.3.0" typing-extensions = ">=4.0.0" +[[package]] +name = "babel" +version = "2.15.0" +description = "Internationalization utilities" +optional = false +python-versions = ">=3.8" +files = [ + {file = "Babel-2.15.0-py3-none-any.whl", hash = "sha256:08706bdad8d0a3413266ab61bd6c34d0c28d6e1e7badf40a2cebe67644e2e1fb"}, + {file = "babel-2.15.0.tar.gz", hash = "sha256:8daf0e265d05768bc6c7a314cf1321e9a123afc328cc635c18622a2f30a04413"}, +] + +[package.extras] +dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] + [[package]] name = "backoff" version = "2.2.1" @@ -404,6 +477,24 @@ charset-normalizer = ["charset-normalizer"] html5lib = ["html5lib"] lxml = ["lxml"] +[[package]] +name = "bleach" +version = "6.1.0" +description = "An easy safelist-based HTML-sanitizing tool." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "bleach-6.1.0-py3-none-any.whl", hash = "sha256:3225f354cfc436b9789c66c4ee030194bee0568fbf9cbdad3bc8b5c26c5f12b6"}, + {file = "bleach-6.1.0.tar.gz", hash = "sha256:0a31f1837963c41d46bbf1331b8778e1308ea0791db03cc4e7357b97cf42a8fe"}, +] + +[package.dependencies] +six = ">=1.9.0" +webencodings = "*" + +[package.extras] +css = ["tinycss2 (>=1.1.0,<1.3)"] + [[package]] name = "blinker" version = "1.8.2" @@ -920,6 +1011,23 @@ humanfriendly = ">=9.1" [package.extras] cron = ["capturer (>=2.4)"] +[[package]] +name = "comm" +version = "0.2.2" +description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." +optional = false +python-versions = ">=3.8" +files = [ + {file = "comm-0.2.2-py3-none-any.whl", hash = "sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3"}, + {file = "comm-0.2.2.tar.gz", hash = "sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e"}, +] + +[package.dependencies] +traitlets = ">=4" + +[package.extras] +test = ["pytest"] + [[package]] name = "contourpy" version = "1.2.1" @@ -1175,6 +1283,37 @@ tests = ["Pillow (>=9.4.0)", "absl-py", "elasticsearch (<8.0.0)", "faiss-cpu (>= torch = ["torch"] vision = ["Pillow (>=9.4.0)"] +[[package]] +name = "debugpy" +version = "1.8.2" +description = "An implementation of the Debug Adapter Protocol for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "debugpy-1.8.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:7ee2e1afbf44b138c005e4380097d92532e1001580853a7cb40ed84e0ef1c3d2"}, + {file = "debugpy-1.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f8c3f7c53130a070f0fc845a0f2cee8ed88d220d6b04595897b66605df1edd6"}, + {file = "debugpy-1.8.2-cp310-cp310-win32.whl", hash = "sha256:f179af1e1bd4c88b0b9f0fa153569b24f6b6f3de33f94703336363ae62f4bf47"}, + {file = "debugpy-1.8.2-cp310-cp310-win_amd64.whl", hash = 
"sha256:0600faef1d0b8d0e85c816b8bb0cb90ed94fc611f308d5fde28cb8b3d2ff0fe3"}, + {file = "debugpy-1.8.2-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:8a13417ccd5978a642e91fb79b871baded925d4fadd4dfafec1928196292aa0a"}, + {file = "debugpy-1.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acdf39855f65c48ac9667b2801234fc64d46778021efac2de7e50907ab90c634"}, + {file = "debugpy-1.8.2-cp311-cp311-win32.whl", hash = "sha256:2cbd4d9a2fc5e7f583ff9bf11f3b7d78dfda8401e8bb6856ad1ed190be4281ad"}, + {file = "debugpy-1.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:d3408fddd76414034c02880e891ea434e9a9cf3a69842098ef92f6e809d09afa"}, + {file = "debugpy-1.8.2-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:5d3ccd39e4021f2eb86b8d748a96c766058b39443c1f18b2dc52c10ac2757835"}, + {file = "debugpy-1.8.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62658aefe289598680193ff655ff3940e2a601765259b123dc7f89c0239b8cd3"}, + {file = "debugpy-1.8.2-cp312-cp312-win32.whl", hash = "sha256:bd11fe35d6fd3431f1546d94121322c0ac572e1bfb1f6be0e9b8655fb4ea941e"}, + {file = "debugpy-1.8.2-cp312-cp312-win_amd64.whl", hash = "sha256:15bc2f4b0f5e99bf86c162c91a74c0631dbd9cef3c6a1d1329c946586255e859"}, + {file = "debugpy-1.8.2-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:5a019d4574afedc6ead1daa22736c530712465c0c4cd44f820d803d937531b2d"}, + {file = "debugpy-1.8.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40f062d6877d2e45b112c0bbade9a17aac507445fd638922b1a5434df34aed02"}, + {file = "debugpy-1.8.2-cp38-cp38-win32.whl", hash = "sha256:c78ba1680f1015c0ca7115671fe347b28b446081dada3fedf54138f44e4ba031"}, + {file = "debugpy-1.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:cf327316ae0c0e7dd81eb92d24ba8b5e88bb4d1b585b5c0d32929274a66a5210"}, + {file = "debugpy-1.8.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = 
"sha256:1523bc551e28e15147815d1397afc150ac99dbd3a8e64641d53425dba57b0ff9"}, + {file = "debugpy-1.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e24ccb0cd6f8bfaec68d577cb49e9c680621c336f347479b3fce060ba7c09ec1"}, + {file = "debugpy-1.8.2-cp39-cp39-win32.whl", hash = "sha256:7f8d57a98c5a486c5c7824bc0b9f2f11189d08d73635c326abef268f83950326"}, + {file = "debugpy-1.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:16c8dcab02617b75697a0a925a62943e26a0330da076e2a10437edd9f0bf3755"}, + {file = "debugpy-1.8.2-py2.py3-none-any.whl", hash = "sha256:16e16df3a98a35c63c3ab1e4d19be4cbc7fdda92d9ddc059294f18910928e0ca"}, + {file = "debugpy-1.8.2.zip", hash = "sha256:95378ed08ed2089221896b9b3a8d021e642c24edc8fef20e5d4342ca8be65c00"}, +] + [[package]] name = "decorator" version = "5.1.1" @@ -1186,6 +1325,17 @@ files = [ {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, ] +[[package]] +name = "defusedxml" +version = "0.7.1" +description = "XML bomb protection for Python stdlib modules" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, + {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, +] + [[package]] name = "deprecated" version = "1.2.14" @@ -1386,6 +1536,20 @@ tensorflow-gpu = ["tensorflow-gpu (>=2.2.0,!=2.6.0,!=2.6.1)"] tests = ["Werkzeug (>=1.0.1)", "absl-py", "accelerate", "bert-score (>=0.3.6)", "cer (>=1.2.0)", "charcut (>=1.1.1)", "jiwer", "mauve-text", "nltk", "pytest", "pytest-datadir", "pytest-xdist", "requests-file (>=1.5.1)", "rouge-score (>=0.1.2)", "sacrebleu", "sacremoses", "scikit-learn", "scipy (>=1.10.0)", "sentencepiece", "seqeval", "six (>=1.15.0,<1.16.0)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1,<=2.10)", "texttable (>=1.6.3)", 
"tldextract (>=3.1.0)", "toml (>=0.10.1)", "torch", "transformers", "trectools", "unidecode (>=1.3.4)"] torch = ["torch"] +[[package]] +name = "executing" +version = "2.0.1" +description = "Get the currently executing AST node of a frame, and other information" +optional = false +python-versions = ">=3.5" +files = [ + {file = "executing-2.0.1-py2.py3-none-any.whl", hash = "sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc"}, + {file = "executing-2.0.1.tar.gz", hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147"}, +] + +[package.extras] +tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] + [[package]] name = "faker" version = "26.0.0" @@ -1472,6 +1636,20 @@ packaging = "*" [package.extras] dev = ["matplotlib", "nbclassic", "nbdev (>=0.2.39)", "numpy", "pandas", "pillow", "torch"] +[[package]] +name = "fastjsonschema" +version = "2.20.0" +description = "Fastest Python implementation of JSON schema" +optional = false +python-versions = "*" +files = [ + {file = "fastjsonschema-2.20.0-py3-none-any.whl", hash = "sha256:5875f0b0fa7a0043a91e93a9b8f793bcbbba9691e7fd83dca95c28ba26d21f0a"}, + {file = "fastjsonschema-2.20.0.tar.gz", hash = "sha256:3d48fc5300ee96f5d116f10fe6f28d938e6008f59a6a025c2649475b87f76a23"}, +] + +[package.extras] +devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] + [[package]] name = "filelock" version = "3.15.4" @@ -1602,6 +1780,17 @@ ufo = ["fs (>=2.2.0,<3)"] unicode = ["unicodedata2 (>=15.1.0)"] woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"] +[[package]] +name = "fqdn" +version = "1.5.1" +description = "Validates fully-qualified domain names against RFC 1123, so that they are acceptable to modern bowsers" +optional = false +python-versions = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4" +files = [ + {file = "fqdn-1.5.1-py3-none-any.whl", 
hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014"}, + {file = "fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f"}, +] + [[package]] name = "frozenlist" version = "1.4.1" @@ -2660,6 +2849,90 @@ files = [ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] +[[package]] +name = "ipykernel" +version = "6.29.5" +description = "IPython Kernel for Jupyter" +optional = false +python-versions = ">=3.8" +files = [ + {file = "ipykernel-6.29.5-py3-none-any.whl", hash = "sha256:afdb66ba5aa354b09b91379bac28ae4afebbb30e8b39510c9690afb7a10421b5"}, + {file = "ipykernel-6.29.5.tar.gz", hash = "sha256:f093a22c4a40f8828f8e330a9c297cb93dcab13bd9678ded6de8e5cf81c56215"}, +] + +[package.dependencies] +appnope = {version = "*", markers = "platform_system == \"Darwin\""} +comm = ">=0.1.1" +debugpy = ">=1.6.5" +ipython = ">=7.23.1" +jupyter-client = ">=6.1.12" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +matplotlib-inline = ">=0.1" +nest-asyncio = "*" +packaging = "*" +psutil = "*" +pyzmq = ">=24" +tornado = ">=6.1" +traitlets = ">=5.4.0" + +[package.extras] +cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"] +pyqt5 = ["pyqt5"] +pyside6 = ["pyside6"] +test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.23.5)", "pytest-cov", "pytest-timeout"] + +[[package]] +name = "ipython" +version = "8.26.0" +description = "IPython: Productive Interactive Computing" +optional = false +python-versions = ">=3.10" +files = [ + {file = "ipython-8.26.0-py3-none-any.whl", hash = "sha256:e6b347c27bdf9c32ee9d31ae85defc525755a1869f14057e900675b9e8d6e6ff"}, + {file = "ipython-8.26.0.tar.gz", hash = 
"sha256:1cec0fbba8404af13facebe83d04436a7434c7400e59f47acf467c64abd0956c"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +decorator = "*" +jedi = ">=0.16" +matplotlib-inline = "*" +pexpect = {version = ">4.3", markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\""} +prompt-toolkit = ">=3.0.41,<3.1.0" +pygments = ">=2.4.0" +stack-data = "*" +traitlets = ">=5.13.0" +typing-extensions = {version = ">=4.6", markers = "python_version < \"3.12\""} + +[package.extras] +all = ["ipython[black,doc,kernel,matplotlib,nbconvert,nbformat,notebook,parallel,qtconsole]", "ipython[test,test-extra]"] +black = ["black"] +doc = ["docrepr", "exceptiongroup", "intersphinx-registry", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "tomli", "typing-extensions"] +kernel = ["ipykernel"] +matplotlib = ["matplotlib"] +nbconvert = ["nbconvert"] +nbformat = ["nbformat"] +notebook = ["ipywidgets", "notebook"] +parallel = ["ipyparallel"] +qtconsole = ["qtconsole"] +test = ["packaging", "pickleshare", "pytest", "pytest-asyncio (<0.22)", "testpath"] +test-extra = ["curio", "ipython[test]", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "trio"] + +[[package]] +name = "isoduration" +version = "20.11.0" +description = "Operations with ISO 8601 durations" +optional = false +python-versions = ">=3.7" +files = [ + {file = "isoduration-20.11.0-py3-none-any.whl", hash = "sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042"}, + {file = "isoduration-20.11.0.tar.gz", hash = "sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9"}, +] + +[package.dependencies] +arrow = ">=0.15.0" + [[package]] name = "itsdangerous" version = "2.2.0" @@ -2671,6 +2944,25 @@ files = [ {file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"}, ] +[[package]] +name = 
"jedi" +version = "0.19.1" +description = "An autocompletion tool for Python that can be used for text editors." +optional = false +python-versions = ">=3.6" +files = [ + {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"}, + {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"}, +] + +[package.dependencies] +parso = ">=0.8.3,<0.9.0" + +[package.extras] +docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] +qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] +testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] + [[package]] name = "jinja2" version = "3.1.4" @@ -2721,6 +3013,28 @@ files = [ {file = "json_repair-0.25.3.tar.gz", hash = "sha256:4ee970581a05b0b258b749eb8bcac21de380edda97c3717a4edfafc519ec21a4"}, ] +[[package]] +name = "json5" +version = "0.9.25" +description = "A Python implementation of the JSON5 data format." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "json5-0.9.25-py3-none-any.whl", hash = "sha256:34ed7d834b1341a86987ed52f3f76cd8ee184394906b6e22a1e0deb9ab294e8f"}, + {file = "json5-0.9.25.tar.gz", hash = "sha256:548e41b9be043f9426776f05df8635a00fe06104ea51ed24b67f908856e151ae"}, +] + +[[package]] +name = "jsonpointer" +version = "3.0.0" +description = "Identify specific nodes in a JSON document (RFC 6901)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942"}, + {file = "jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef"}, +] + [[package]] name = "jsonrpcclient" version = "4.0.3" @@ -2747,9 +3061,17 @@ files = [ [package.dependencies] attrs = ">=22.2.0" +fqdn = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +idna = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +isoduration = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +jsonpointer = {version = ">1.13", optional = true, markers = "extra == \"format-nongpl\""} jsonschema-specifications = ">=2023.03.6" referencing = ">=0.28.4" +rfc3339-validator = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +rfc3986-validator = {version = ">0.1.0", optional = true, markers = "extra == \"format-nongpl\""} rpds-py = ">=0.7.1" +uri-template = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +webcolors = {version = ">=1.11", optional = true, markers = "extra == \"format-nongpl\""} [package.extras] format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] @@ -2769,6 +3091,234 @@ files = [ [package.dependencies] referencing = ">=0.31.0" +[[package]] +name = "jupyter-client" +version = "8.6.2" +description = "Jupyter protocol 
implementation and client libraries" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_client-8.6.2-py3-none-any.whl", hash = "sha256:50cbc5c66fd1b8f65ecb66bc490ab73217993632809b6e505687de18e9dea39f"}, + {file = "jupyter_client-8.6.2.tar.gz", hash = "sha256:2bda14d55ee5ba58552a8c53ae43d215ad9868853489213f37da060ced54d8df"}, +] + +[package.dependencies] +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +python-dateutil = ">=2.8.2" +pyzmq = ">=23.0" +tornado = ">=6.2" +traitlets = ">=5.3" + +[package.extras] +docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx (>=4)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] +test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest (<8.2.0)", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] + +[[package]] +name = "jupyter-core" +version = "5.7.2" +description = "Jupyter core package. A base package on which Jupyter projects rely." +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_core-5.7.2-py3-none-any.whl", hash = "sha256:4f7315d2f6b4bcf2e3e7cb6e46772eba760ae459cd1f59d29eb57b0a01bd7409"}, + {file = "jupyter_core-5.7.2.tar.gz", hash = "sha256:aa5f8d32bbf6b431ac830496da7392035d6f61b4f54872f15c4bd2a9c3f536d9"}, +] + +[package.dependencies] +platformdirs = ">=2.5" +pywin32 = {version = ">=300", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} +traitlets = ">=5.3" + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"] +test = ["ipykernel", "pre-commit", "pytest (<8)", "pytest-cov", "pytest-timeout"] + +[[package]] +name = "jupyter-events" +version = "0.10.0" +description = "Jupyter Event System library" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_events-0.10.0-py3-none-any.whl", hash = 
"sha256:4b72130875e59d57716d327ea70d3ebc3af1944d3717e5a498b8a06c6c159960"}, + {file = "jupyter_events-0.10.0.tar.gz", hash = "sha256:670b8229d3cc882ec782144ed22e0d29e1c2d639263f92ca8383e66682845e22"}, +] + +[package.dependencies] +jsonschema = {version = ">=4.18.0", extras = ["format-nongpl"]} +python-json-logger = ">=2.0.4" +pyyaml = ">=5.3" +referencing = "*" +rfc3339-validator = "*" +rfc3986-validator = ">=0.1.1" +traitlets = ">=5.3" + +[package.extras] +cli = ["click", "rich"] +docs = ["jupyterlite-sphinx", "myst-parser", "pydata-sphinx-theme", "sphinxcontrib-spelling"] +test = ["click", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.19.0)", "pytest-console-scripts", "rich"] + +[[package]] +name = "jupyter-kernel-gateway" +version = "3.0.1" +description = "A web server for spawning and communicating with Jupyter kernels" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_kernel_gateway-3.0.1-py3-none-any.whl", hash = "sha256:9f74a2f4ff9f03737bcab79f44ae0f6473ee32deb30fce00b8f05adcdd023f03"}, + {file = "jupyter_kernel_gateway-3.0.1.tar.gz", hash = "sha256:900690c4c0e796867355468d685f7fa1cf3c7775d08e871c157f77d65fbd6d7f"}, +] + +[package.dependencies] +jupyter-client = ">=8.6" +jupyter-core = ">=5.7" +jupyter-server = ">=2.12" +requests = ">=2.31" +tornado = ">=6.4" +traitlets = ">=5.14.1" + +[package.extras] +docs = ["myst-parser", "sphinx", "sphinx-rtd-theme"] +test = ["coverage", "ipykernel", "pytest", "pytest-cov", "pytest-jupyter", "pytest-timeout"] + +[[package]] +name = "jupyter-lsp" +version = "2.2.5" +description = "Multi-Language Server WebSocket proxy for Jupyter Notebook/Lab server" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter-lsp-2.2.5.tar.gz", hash = "sha256:793147a05ad446f809fd53ef1cd19a9f5256fd0a2d6b7ce943a982cb4f545001"}, + {file = "jupyter_lsp-2.2.5-py3-none-any.whl", hash = "sha256:45fbddbd505f3fbfb0b6cb2f1bc5e15e83ab7c79cd6e89416b248cb3c00c11da"}, +] + +[package.dependencies] 
+jupyter-server = ">=1.1.2" + +[[package]] +name = "jupyter-server" +version = "2.14.1" +description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_server-2.14.1-py3-none-any.whl", hash = "sha256:16f7177c3a4ea8fe37784e2d31271981a812f0b2874af17339031dc3510cc2a5"}, + {file = "jupyter_server-2.14.1.tar.gz", hash = "sha256:12558d158ec7a0653bf96cc272bc7ad79e0127d503b982ed144399346694f726"}, +] + +[package.dependencies] +anyio = ">=3.1.0" +argon2-cffi = ">=21.1" +jinja2 = ">=3.0.3" +jupyter-client = ">=7.4.4" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +jupyter-events = ">=0.9.0" +jupyter-server-terminals = ">=0.4.4" +nbconvert = ">=6.4.4" +nbformat = ">=5.3.0" +overrides = ">=5.0" +packaging = ">=22.0" +prometheus-client = ">=0.9" +pywinpty = {version = ">=2.0.1", markers = "os_name == \"nt\""} +pyzmq = ">=24" +send2trash = ">=1.8.2" +terminado = ">=0.8.3" +tornado = ">=6.2.0" +traitlets = ">=5.6.0" +websocket-client = ">=1.7" + +[package.extras] +docs = ["ipykernel", "jinja2", "jupyter-client", "myst-parser", "nbformat", "prometheus-client", "pydata-sphinx-theme", "send2trash", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-openapi (>=0.8.0)", "sphinxcontrib-spelling", "sphinxemoji", "tornado", "typing-extensions"] +test = ["flaky", "ipykernel", "pre-commit", "pytest (>=7.0,<9)", "pytest-console-scripts", "pytest-jupyter[server] (>=0.7)", "pytest-timeout", "requests"] + +[[package]] +name = "jupyter-server-terminals" +version = "0.5.3" +description = "A Jupyter Server Extension Providing Terminals." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_server_terminals-0.5.3-py3-none-any.whl", hash = "sha256:41ee0d7dc0ebf2809c668e0fc726dfaf258fcd3e769568996ca731b6194ae9aa"}, + {file = "jupyter_server_terminals-0.5.3.tar.gz", hash = "sha256:5ae0295167220e9ace0edcfdb212afd2b01ee8d179fe6f23c899590e9b8a5269"}, +] + +[package.dependencies] +pywinpty = {version = ">=2.0.3", markers = "os_name == \"nt\""} +terminado = ">=0.8.3" + +[package.extras] +docs = ["jinja2", "jupyter-server", "mistune (<4.0)", "myst-parser", "nbformat", "packaging", "pydata-sphinx-theme", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxcontrib-spelling", "sphinxemoji", "tornado"] +test = ["jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-jupyter[server] (>=0.5.3)", "pytest-timeout"] + +[[package]] +name = "jupyterlab" +version = "4.2.3" +description = "JupyterLab computational environment" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyterlab-4.2.3-py3-none-any.whl", hash = "sha256:0b59d11808e84bb84105c73364edfa867dd475492429ab34ea388a52f2e2e596"}, + {file = "jupyterlab-4.2.3.tar.gz", hash = "sha256:df6e46969ea51d66815167f23d92f105423b7f1f06fa604d4f44aeb018c82c7b"}, +] + +[package.dependencies] +async-lru = ">=1.0.0" +httpx = ">=0.25.0" +ipykernel = ">=6.5.0" +jinja2 = ">=3.0.3" +jupyter-core = "*" +jupyter-lsp = ">=2.0.0" +jupyter-server = ">=2.4.0,<3" +jupyterlab-server = ">=2.27.1,<3" +notebook-shim = ">=0.2" +packaging = "*" +setuptools = ">=40.1.0" +tornado = ">=6.2.0" +traitlets = "*" + +[package.extras] +dev = ["build", "bump2version", "coverage", "hatch", "pre-commit", "pytest-cov", "ruff (==0.3.5)"] +docs = ["jsx-lexer", "myst-parser", "pydata-sphinx-theme (>=0.13.0)", "pytest", "pytest-check-links", "pytest-jupyter", "sphinx (>=1.8,<7.3.0)", "sphinx-copybutton"] +docs-screenshots = ["altair (==5.3.0)", "ipython (==8.16.1)", "ipywidgets (==8.1.2)", "jupyterlab-geojson (==3.4.0)", "jupyterlab-language-pack-zh-cn 
(==4.1.post2)", "matplotlib (==3.8.3)", "nbconvert (>=7.0.0)", "pandas (==2.2.1)", "scipy (==1.12.0)", "vega-datasets (==0.9.0)"] +test = ["coverage", "pytest (>=7.0)", "pytest-check-links (>=0.7)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter (>=0.5.3)", "pytest-timeout", "pytest-tornasync", "requests", "requests-cache", "virtualenv"] +upgrade-extension = ["copier (>=8,<10)", "jinja2-time (<0.3)", "pydantic (<2.0)", "pyyaml-include (<2.0)", "tomli-w (<2.0)"] + +[[package]] +name = "jupyterlab-pygments" +version = "0.3.0" +description = "Pygments theme using JupyterLab CSS variables" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyterlab_pygments-0.3.0-py3-none-any.whl", hash = "sha256:841a89020971da1d8693f1a99997aefc5dc424bb1b251fd6322462a1b8842780"}, + {file = "jupyterlab_pygments-0.3.0.tar.gz", hash = "sha256:721aca4d9029252b11cfa9d185e5b5af4d54772bb8072f9b7036f4170054d35d"}, +] + +[[package]] +name = "jupyterlab-server" +version = "2.27.2" +description = "A set of server components for JupyterLab and JupyterLab like applications." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyterlab_server-2.27.2-py3-none-any.whl", hash = "sha256:54aa2d64fd86383b5438d9f0c032f043c4d8c0264b8af9f60bd061157466ea43"}, + {file = "jupyterlab_server-2.27.2.tar.gz", hash = "sha256:15cbb349dc45e954e09bacf81b9f9bcb10815ff660fb2034ecd7417db3a7ea27"}, +] + +[package.dependencies] +babel = ">=2.10" +jinja2 = ">=3.0.3" +json5 = ">=0.9.0" +jsonschema = ">=4.18.0" +jupyter-server = ">=1.21,<3" +packaging = ">=21.3" +requests = ">=2.31" + +[package.extras] +docs = ["autodoc-traits", "jinja2 (<3.2.0)", "mistune (<4)", "myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-copybutton", "sphinxcontrib-openapi (>0.8)"] +openapi = ["openapi-core (>=0.18.0,<0.19.0)", "ruamel-yaml"] +test = ["hatch", "ipykernel", "openapi-core (>=0.18.0,<0.19.0)", "openapi-spec-validator (>=0.6.0,<0.8.0)", "pytest (>=7.0,<8)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter[server] (>=0.6.2)", "pytest-timeout", "requests-mock", "ruamel-yaml", "sphinxcontrib-spelling", "strict-rfc3339", "werkzeug"] + [[package]] name = "kiwisolver" version = "1.4.5" @@ -3407,13 +3957,9 @@ files = [ {file = "lxml-5.2.2-cp36-cp36m-win_amd64.whl", hash = "sha256:edcfa83e03370032a489430215c1e7783128808fd3e2e0a3225deee278585196"}, {file = "lxml-5.2.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:28bf95177400066596cdbcfc933312493799382879da504633d16cf60bba735b"}, {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a745cc98d504d5bd2c19b10c79c61c7c3df9222629f1b6210c0368177589fb8"}, - {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b590b39ef90c6b22ec0be925b211298e810b4856909c8ca60d27ffbca6c12e6"}, {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b336b0416828022bfd5a2e3083e7f5ba54b96242159f83c7e3eebaec752f1716"}, - {file = 
"lxml-5.2.2-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:c2faf60c583af0d135e853c86ac2735ce178f0e338a3c7f9ae8f622fd2eb788c"}, {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:4bc6cb140a7a0ad1f7bc37e018d0ed690b7b6520ade518285dc3171f7a117905"}, - {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7ff762670cada8e05b32bf1e4dc50b140790909caa8303cfddc4d702b71ea184"}, {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:57f0a0bbc9868e10ebe874e9f129d2917750adf008fe7b9c1598c0fbbfdde6a6"}, - {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:a6d2092797b388342c1bc932077ad232f914351932353e2e8706851c870bca1f"}, {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:60499fe961b21264e17a471ec296dcbf4365fbea611bf9e303ab69db7159ce61"}, {file = "lxml-5.2.2-cp37-cp37m-win32.whl", hash = "sha256:d9b342c76003c6b9336a80efcc766748a333573abf9350f4094ee46b006ec18f"}, {file = "lxml-5.2.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b16db2770517b8799c79aa80f4053cd6f8b716f21f8aca962725a9565ce3ee40"}, @@ -3648,6 +4194,20 @@ python-dateutil = ">=2.7" [package.extras] dev = ["meson-python (>=0.13.1)", "numpy (>=1.25)", "pybind11 (>=2.6)", "setuptools (>=64)", "setuptools_scm (>=7)"] +[[package]] +name = "matplotlib-inline" +version = "0.1.7" +description = "Inline Matplotlib backend for Jupyter" +optional = false +python-versions = ">=3.8" +files = [ + {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"}, + {file = "matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"}, +] + +[package.dependencies] +traitlets = "*" + [[package]] name = "mccabe" version = "0.7.0" @@ -3705,6 +4265,17 @@ pycryptodome = "*" typing-extensions = "*" urllib3 = "*" +[[package]] +name = "mistune" +version = "3.0.2" +description = "A sane and fast Markdown parser with 
useful plugins and renderers" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mistune-3.0.2-py3-none-any.whl", hash = "sha256:71481854c30fdbc938963d3605b72501f5c10a9320ecd412c121c163a1c7d205"}, + {file = "mistune-3.0.2.tar.gz", hash = "sha256:fc7f93ded930c92394ef2cb6f04a8aabab4117a91449e72dcc8dfa646a508be8"}, +] + [[package]] name = "mmh3" version = "4.1.0" @@ -4039,6 +4610,86 @@ files = [ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] +[[package]] +name = "nbclient" +version = "0.10.0" +description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "nbclient-0.10.0-py3-none-any.whl", hash = "sha256:f13e3529332a1f1f81d82a53210322476a168bb7090a0289c795fe9cc11c9d3f"}, + {file = "nbclient-0.10.0.tar.gz", hash = "sha256:4b3f1b7dba531e498449c4db4f53da339c91d449dc11e9af3a43b4eb5c5abb09"}, +] + +[package.dependencies] +jupyter-client = ">=6.1.12" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +nbformat = ">=5.1" +traitlets = ">=5.4" + +[package.extras] +dev = ["pre-commit"] +docs = ["autodoc-traits", "mock", "moto", "myst-parser", "nbclient[test]", "sphinx (>=1.7)", "sphinx-book-theme", "sphinxcontrib-spelling"] +test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>=7.0.0)", "pytest (>=7.0,<8)", "pytest-asyncio", "pytest-cov (>=4.0)", "testpath", "xmltodict"] + +[[package]] +name = "nbconvert" +version = "7.16.4" +description = "Converting Jupyter Notebooks (.ipynb files) to other formats. Output formats include asciidoc, html, latex, markdown, pdf, py, rst, script. nbconvert can be used both as a Python library (`import nbconvert`) or as a command line tool (invoked as `jupyter nbconvert ...`)." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "nbconvert-7.16.4-py3-none-any.whl", hash = "sha256:05873c620fe520b6322bf8a5ad562692343fe3452abda5765c7a34b7d1aa3eb3"}, + {file = "nbconvert-7.16.4.tar.gz", hash = "sha256:86ca91ba266b0a448dc96fa6c5b9d98affabde2867b363258703536807f9f7f4"}, +] + +[package.dependencies] +beautifulsoup4 = "*" +bleach = "!=5.0.0" +defusedxml = "*" +jinja2 = ">=3.0" +jupyter-core = ">=4.7" +jupyterlab-pygments = "*" +markupsafe = ">=2.0" +mistune = ">=2.0.3,<4" +nbclient = ">=0.5.0" +nbformat = ">=5.7" +packaging = "*" +pandocfilters = ">=1.4.1" +pygments = ">=2.4.1" +tinycss2 = "*" +traitlets = ">=5.1" + +[package.extras] +all = ["flaky", "ipykernel", "ipython", "ipywidgets (>=7.5)", "myst-parser", "nbsphinx (>=0.2.12)", "playwright", "pydata-sphinx-theme", "pyqtwebengine (>=5.15)", "pytest (>=7)", "sphinx (==5.0.2)", "sphinxcontrib-spelling", "tornado (>=6.1)"] +docs = ["ipykernel", "ipython", "myst-parser", "nbsphinx (>=0.2.12)", "pydata-sphinx-theme", "sphinx (==5.0.2)", "sphinxcontrib-spelling"] +qtpdf = ["pyqtwebengine (>=5.15)"] +qtpng = ["pyqtwebengine (>=5.15)"] +serve = ["tornado (>=6.1)"] +test = ["flaky", "ipykernel", "ipywidgets (>=7.5)", "pytest (>=7)"] +webpdf = ["playwright"] + +[[package]] +name = "nbformat" +version = "5.10.4" +description = "The Jupyter Notebook format" +optional = false +python-versions = ">=3.8" +files = [ + {file = "nbformat-5.10.4-py3-none-any.whl", hash = "sha256:3b48d6c8fbca4b299bf3982ea7db1af21580e4fec269ad087b9e81588891200b"}, + {file = "nbformat-5.10.4.tar.gz", hash = "sha256:322168b14f937a5d11362988ecac2a4952d3d8e3a2cbeb2319584631226d5b3a"}, +] + +[package.dependencies] +fastjsonschema = ">=2.15" +jsonschema = ">=2.6" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +traitlets = ">=5.1" + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] +test = ["pep440", "pre-commit", "pytest", "testpath"] + 
[[package]] name = "nest-asyncio" version = "1.6.0" @@ -4104,6 +4755,46 @@ files = [ {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, ] +[[package]] +name = "notebook" +version = "7.2.1" +description = "Jupyter Notebook - A web-based notebook environment for interactive computing" +optional = false +python-versions = ">=3.8" +files = [ + {file = "notebook-7.2.1-py3-none-any.whl", hash = "sha256:f45489a3995746f2195a137e0773e2130960b51c9ac3ce257dbc2705aab3a6ca"}, + {file = "notebook-7.2.1.tar.gz", hash = "sha256:4287b6da59740b32173d01d641f763d292f49c30e7a51b89c46ba8473126341e"}, +] + +[package.dependencies] +jupyter-server = ">=2.4.0,<3" +jupyterlab = ">=4.2.0,<4.3" +jupyterlab-server = ">=2.27.1,<3" +notebook-shim = ">=0.2,<0.3" +tornado = ">=6.2.0" + +[package.extras] +dev = ["hatch", "pre-commit"] +docs = ["myst-parser", "nbsphinx", "pydata-sphinx-theme", "sphinx (>=1.3.6)", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] +test = ["importlib-resources (>=5.0)", "ipykernel", "jupyter-server[test] (>=2.4.0,<3)", "jupyterlab-server[test] (>=2.27.1,<3)", "nbval", "pytest (>=7.0)", "pytest-console-scripts", "pytest-timeout", "pytest-tornasync", "requests"] + +[[package]] +name = "notebook-shim" +version = "0.2.4" +description = "A shim layer for notebook traits and config" +optional = false +python-versions = ">=3.7" +files = [ + {file = "notebook_shim-0.2.4-py3-none-any.whl", hash = "sha256:411a5be4e9dc882a074ccbcae671eda64cceb068767e9a3419096986560e1cef"}, + {file = "notebook_shim-0.2.4.tar.gz", hash = "sha256:b4b2cfa1b65d98307ca24361f5b30fe785b53c3fd07b7a47e89acb5e6ac638cb"}, +] + +[package.dependencies] +jupyter-server = ">=1.8,<3" + +[package.extras] +test = ["pytest", "pytest-console-scripts", "pytest-jupyter", "pytest-tornasync"] + [[package]] name = "numpy" version = "1.26.4" @@ -4705,6 +5396,32 @@ sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-d test = 
["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] xml = ["lxml (>=4.9.2)"] +[[package]] +name = "pandocfilters" +version = "1.5.1" +description = "Utilities for writing pandoc filters in python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pandocfilters-1.5.1-py2.py3-none-any.whl", hash = "sha256:93be382804a9cdb0a7267585f157e5d1731bbe5545a85b268d6f5fe6232de2bc"}, + {file = "pandocfilters-1.5.1.tar.gz", hash = "sha256:002b4a555ee4ebc03f8b66307e287fa492e4a77b4ea14d3f934328297bb4939e"}, +] + +[[package]] +name = "parso" +version = "0.8.4" +description = "A Python Parser" +optional = false +python-versions = ">=3.6" +files = [ + {file = "parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"}, + {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"}, +] + +[package.extras] +qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] +testing = ["docopt", "pytest"] + [[package]] name = "pathspec" version = "0.12.1" @@ -4938,6 +5655,34 @@ nodeenv = ">=0.11.1" pyyaml = ">=5.1" virtualenv = ">=20.10.0" +[[package]] +name = "prometheus-client" +version = "0.20.0" +description = "Python client for the Prometheus monitoring system." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "prometheus_client-0.20.0-py3-none-any.whl", hash = "sha256:cde524a85bce83ca359cc837f28b8c0db5cac7aa653a588fd7e84ba061c329e7"}, + {file = "prometheus_client-0.20.0.tar.gz", hash = "sha256:287629d00b147a32dcb2be0b9df905da599b2d82f80377083ec8463309a4bb89"}, +] + +[package.extras] +twisted = ["twisted"] + +[[package]] +name = "prompt-toolkit" +version = "3.0.47" +description = "Library for building powerful interactive command lines in Python" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "prompt_toolkit-3.0.47-py3-none-any.whl", hash = "sha256:0d7bfa67001d5e39d02c224b663abc33687405033a8c422d0d675a5a13361d10"}, + {file = "prompt_toolkit-3.0.47.tar.gz", hash = "sha256:1e1b29cb58080b1e69f207c893a1a7bf16d127a5c30c9d17a25a5d77792e5360"}, +] + +[package.dependencies] +wcwidth = "*" + [[package]] name = "proto-plus" version = "1.24.0" @@ -4975,6 +5720,35 @@ files = [ {file = "protobuf-4.25.3.tar.gz", hash = "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c"}, ] +[[package]] +name = "psutil" +version = "6.0.0" +description = "Cross-platform lib for process and system monitoring in Python." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "psutil-6.0.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a021da3e881cd935e64a3d0a20983bda0bb4cf80e4f74fa9bfcb1bc5785360c6"}, + {file = "psutil-6.0.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:1287c2b95f1c0a364d23bc6f2ea2365a8d4d9b726a3be7294296ff7ba97c17f0"}, + {file = "psutil-6.0.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:a9a3dbfb4de4f18174528d87cc352d1f788b7496991cca33c6996f40c9e3c92c"}, + {file = "psutil-6.0.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6ec7588fb3ddaec7344a825afe298db83fe01bfaaab39155fa84cf1c0d6b13c3"}, + {file = "psutil-6.0.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:1e7c870afcb7d91fdea2b37c24aeb08f98b6d67257a5cb0a8bc3ac68d0f1a68c"}, + {file = "psutil-6.0.0-cp27-none-win32.whl", hash = "sha256:02b69001f44cc73c1c5279d02b30a817e339ceb258ad75997325e0e6169d8b35"}, + {file = "psutil-6.0.0-cp27-none-win_amd64.whl", hash = "sha256:21f1fb635deccd510f69f485b87433460a603919b45e2a324ad65b0cc74f8fb1"}, + {file = "psutil-6.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c588a7e9b1173b6e866756dde596fd4cad94f9399daf99ad8c3258b3cb2b47a0"}, + {file = "psutil-6.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ed2440ada7ef7d0d608f20ad89a04ec47d2d3ab7190896cd62ca5fc4fe08bf0"}, + {file = "psutil-6.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fd9a97c8e94059b0ef54a7d4baf13b405011176c3b6ff257c247cae0d560ecd"}, + {file = "psutil-6.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e8d0054fc88153ca0544f5c4d554d42e33df2e009c4ff42284ac9ebdef4132"}, + {file = "psutil-6.0.0-cp36-cp36m-win32.whl", hash = "sha256:fc8c9510cde0146432bbdb433322861ee8c3efbf8589865c8bf8d21cb30c4d14"}, + {file = "psutil-6.0.0-cp36-cp36m-win_amd64.whl", hash = 
"sha256:34859b8d8f423b86e4385ff3665d3f4d94be3cdf48221fbe476e883514fdb71c"}, + {file = "psutil-6.0.0-cp37-abi3-win32.whl", hash = "sha256:a495580d6bae27291324fe60cea0b5a7c23fa36a7cd35035a16d93bdcf076b9d"}, + {file = "psutil-6.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:33ea5e1c975250a720b3a6609c490db40dae5d83a4eb315170c4fe0d8b1f34b3"}, + {file = "psutil-6.0.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:ffe7fc9b6b36beadc8c322f84e1caff51e8703b88eee1da46d1e3a6ae11b4fd0"}, + {file = "psutil-6.0.0.tar.gz", hash = "sha256:8faae4f310b6d969fa26ca0545338b21f73c6b15db7c4a8d934a5482faa818f2"}, +] + +[package.extras] +test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] + [[package]] name = "ptyprocess" version = "0.7.0" @@ -4986,6 +5760,20 @@ files = [ {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, ] +[[package]] +name = "pure-eval" +version = "0.2.2" +description = "Safely evaluate AST nodes without side effects" +optional = false +python-versions = "*" +files = [ + {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, + {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, +] + +[package.extras] +tests = ["pytest"] + [[package]] name = "py" version = "1.11.0" @@ -5541,6 +6329,17 @@ files = [ [package.extras] cli = ["click (>=5.0)"] +[[package]] +name = "python-json-logger" +version = "2.0.7" +description = "A python library adding a json log formatter" +optional = false +python-versions = ">=3.6" +files = [ + {file = "python-json-logger-2.0.7.tar.gz", hash = "sha256:23e7ec02d34237c5aa1e29a070193a4ea87583bb4e7f8fd06d3de8264c4b2e1c"}, + {file = "python_json_logger-2.0.7-py3-none-any.whl", hash = "sha256:f380b826a991ebbe3de4d897aeec42760035ac760345e57b812938dc8b35e2bd"}, +] + [[package]] name = "python-multipart" version = "0.0.9" @@ -5605,6 +6404,21 @@ files = [ 
{file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, ] +[[package]] +name = "pywinpty" +version = "2.0.13" +description = "Pseudo terminal support for Windows from Python." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pywinpty-2.0.13-cp310-none-win_amd64.whl", hash = "sha256:697bff211fb5a6508fee2dc6ff174ce03f34a9a233df9d8b5fe9c8ce4d5eaf56"}, + {file = "pywinpty-2.0.13-cp311-none-win_amd64.whl", hash = "sha256:b96fb14698db1284db84ca38c79f15b4cfdc3172065b5137383910567591fa99"}, + {file = "pywinpty-2.0.13-cp312-none-win_amd64.whl", hash = "sha256:2fd876b82ca750bb1333236ce98488c1be96b08f4f7647cfdf4129dfad83c2d4"}, + {file = "pywinpty-2.0.13-cp38-none-win_amd64.whl", hash = "sha256:61d420c2116c0212808d31625611b51caf621fe67f8a6377e2e8b617ea1c1f7d"}, + {file = "pywinpty-2.0.13-cp39-none-win_amd64.whl", hash = "sha256:71cb613a9ee24174730ac7ae439fd179ca34ccb8c5349e8d7b72ab5dea2c6f4b"}, + {file = "pywinpty-2.0.13.tar.gz", hash = "sha256:c34e32351a3313ddd0d7da23d27f835c860d32fe4ac814d372a3ea9594f41dde"}, +] + [[package]] name = "pyyaml" version = "6.0.1" @@ -5665,6 +6479,106 @@ files = [ {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, ] +[[package]] +name = "pyzmq" +version = "26.0.3" +description = "Python bindings for 0MQ" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pyzmq-26.0.3-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:44dd6fc3034f1eaa72ece33588867df9e006a7303725a12d64c3dff92330f625"}, + {file = "pyzmq-26.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:acb704195a71ac5ea5ecf2811c9ee19ecdc62b91878528302dd0be1b9451cc90"}, + {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dbb9c997932473a27afa93954bb77a9f9b786b4ccf718d903f35da3232317de"}, + {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", 
hash = "sha256:6bcb34f869d431799c3ee7d516554797f7760cb2198ecaa89c3f176f72d062be"}, + {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ece17ec5f20d7d9b442e5174ae9f020365d01ba7c112205a4d59cf19dc38ee"}, + {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:ba6e5e6588e49139a0979d03a7deb9c734bde647b9a8808f26acf9c547cab1bf"}, + {file = "pyzmq-26.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3bf8b000a4e2967e6dfdd8656cd0757d18c7e5ce3d16339e550bd462f4857e59"}, + {file = "pyzmq-26.0.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2136f64fbb86451dbbf70223635a468272dd20075f988a102bf8a3f194a411dc"}, + {file = "pyzmq-26.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e8918973fbd34e7814f59143c5f600ecd38b8038161239fd1a3d33d5817a38b8"}, + {file = "pyzmq-26.0.3-cp310-cp310-win32.whl", hash = "sha256:0aaf982e68a7ac284377d051c742610220fd06d330dcd4c4dbb4cdd77c22a537"}, + {file = "pyzmq-26.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:f1a9b7d00fdf60b4039f4455afd031fe85ee8305b019334b72dcf73c567edc47"}, + {file = "pyzmq-26.0.3-cp310-cp310-win_arm64.whl", hash = "sha256:80b12f25d805a919d53efc0a5ad7c0c0326f13b4eae981a5d7b7cc343318ebb7"}, + {file = "pyzmq-26.0.3-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:a72a84570f84c374b4c287183debc776dc319d3e8ce6b6a0041ce2e400de3f32"}, + {file = "pyzmq-26.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7ca684ee649b55fd8f378127ac8462fb6c85f251c2fb027eb3c887e8ee347bcd"}, + {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e222562dc0f38571c8b1ffdae9d7adb866363134299264a1958d077800b193b7"}, + {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f17cde1db0754c35a91ac00b22b25c11da6eec5746431d6e5092f0cd31a3fea9"}, + {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:4b7c0c0b3244bb2275abe255d4a30c050d541c6cb18b870975553f1fb6f37527"}, + {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:ac97a21de3712afe6a6c071abfad40a6224fd14fa6ff0ff8d0c6e6cd4e2f807a"}, + {file = "pyzmq-26.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:88b88282e55fa39dd556d7fc04160bcf39dea015f78e0cecec8ff4f06c1fc2b5"}, + {file = "pyzmq-26.0.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:72b67f966b57dbd18dcc7efbc1c7fc9f5f983e572db1877081f075004614fcdd"}, + {file = "pyzmq-26.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f4b6cecbbf3b7380f3b61de3a7b93cb721125dc125c854c14ddc91225ba52f83"}, + {file = "pyzmq-26.0.3-cp311-cp311-win32.whl", hash = "sha256:eed56b6a39216d31ff8cd2f1d048b5bf1700e4b32a01b14379c3b6dde9ce3aa3"}, + {file = "pyzmq-26.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:3191d312c73e3cfd0f0afdf51df8405aafeb0bad71e7ed8f68b24b63c4f36500"}, + {file = "pyzmq-26.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:b6907da3017ef55139cf0e417c5123a84c7332520e73a6902ff1f79046cd3b94"}, + {file = "pyzmq-26.0.3-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:068ca17214038ae986d68f4a7021f97e187ed278ab6dccb79f837d765a54d753"}, + {file = "pyzmq-26.0.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7821d44fe07335bea256b9f1f41474a642ca55fa671dfd9f00af8d68a920c2d4"}, + {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eeb438a26d87c123bb318e5f2b3d86a36060b01f22fbdffd8cf247d52f7c9a2b"}, + {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:69ea9d6d9baa25a4dc9cef5e2b77b8537827b122214f210dd925132e34ae9b12"}, + {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7daa3e1369355766dea11f1d8ef829905c3b9da886ea3152788dc25ee6079e02"}, + {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_28_x86_64.whl", hash = 
"sha256:6ca7a9a06b52d0e38ccf6bca1aeff7be178917893f3883f37b75589d42c4ac20"}, + {file = "pyzmq-26.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1b7d0e124948daa4d9686d421ef5087c0516bc6179fdcf8828b8444f8e461a77"}, + {file = "pyzmq-26.0.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e746524418b70f38550f2190eeee834db8850088c834d4c8406fbb9bc1ae10b2"}, + {file = "pyzmq-26.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:6b3146f9ae6af82c47a5282ac8803523d381b3b21caeae0327ed2f7ecb718798"}, + {file = "pyzmq-26.0.3-cp312-cp312-win32.whl", hash = "sha256:2b291d1230845871c00c8462c50565a9cd6026fe1228e77ca934470bb7d70ea0"}, + {file = "pyzmq-26.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:926838a535c2c1ea21c903f909a9a54e675c2126728c21381a94ddf37c3cbddf"}, + {file = "pyzmq-26.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:5bf6c237f8c681dfb91b17f8435b2735951f0d1fad10cc5dfd96db110243370b"}, + {file = "pyzmq-26.0.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c0991f5a96a8e620f7691e61178cd8f457b49e17b7d9cfa2067e2a0a89fc1d5"}, + {file = "pyzmq-26.0.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:dbf012d8fcb9f2cf0643b65df3b355fdd74fc0035d70bb5c845e9e30a3a4654b"}, + {file = "pyzmq-26.0.3-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:01fbfbeb8249a68d257f601deb50c70c929dc2dfe683b754659569e502fbd3aa"}, + {file = "pyzmq-26.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c8eb19abe87029c18f226d42b8a2c9efdd139d08f8bf6e085dd9075446db450"}, + {file = "pyzmq-26.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5344b896e79800af86ad643408ca9aa303a017f6ebff8cee5a3163c1e9aec987"}, + {file = "pyzmq-26.0.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:204e0f176fd1d067671157d049466869b3ae1fc51e354708b0dc41cf94e23a3a"}, + {file = "pyzmq-26.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a42db008d58530efa3b881eeee4991146de0b790e095f7ae43ba5cc612decbc5"}, + {file = 
"pyzmq-26.0.3-cp37-cp37m-win32.whl", hash = "sha256:8d7a498671ca87e32b54cb47c82a92b40130a26c5197d392720a1bce1b3c77cf"}, + {file = "pyzmq-26.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:3b4032a96410bdc760061b14ed6a33613ffb7f702181ba999df5d16fb96ba16a"}, + {file = "pyzmq-26.0.3-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:2cc4e280098c1b192c42a849de8de2c8e0f3a84086a76ec5b07bfee29bda7d18"}, + {file = "pyzmq-26.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5bde86a2ed3ce587fa2b207424ce15b9a83a9fa14422dcc1c5356a13aed3df9d"}, + {file = "pyzmq-26.0.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:34106f68e20e6ff253c9f596ea50397dbd8699828d55e8fa18bd4323d8d966e6"}, + {file = "pyzmq-26.0.3-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ebbbd0e728af5db9b04e56389e2299a57ea8b9dd15c9759153ee2455b32be6ad"}, + {file = "pyzmq-26.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6b1d1c631e5940cac5a0b22c5379c86e8df6a4ec277c7a856b714021ab6cfad"}, + {file = "pyzmq-26.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e891ce81edd463b3b4c3b885c5603c00141151dd9c6936d98a680c8c72fe5c67"}, + {file = "pyzmq-26.0.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9b273ecfbc590a1b98f014ae41e5cf723932f3b53ba9367cfb676f838038b32c"}, + {file = "pyzmq-26.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b32bff85fb02a75ea0b68f21e2412255b5731f3f389ed9aecc13a6752f58ac97"}, + {file = "pyzmq-26.0.3-cp38-cp38-win32.whl", hash = "sha256:f6c21c00478a7bea93caaaef9e7629145d4153b15a8653e8bb4609d4bc70dbfc"}, + {file = "pyzmq-26.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:3401613148d93ef0fd9aabdbddb212de3db7a4475367f49f590c837355343972"}, + {file = "pyzmq-26.0.3-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:2ed8357f4c6e0daa4f3baf31832df8a33334e0fe5b020a61bc8b345a3db7a606"}, + {file = "pyzmq-26.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:c1c8f2a2ca45292084c75bb6d3a25545cff0ed931ed228d3a1810ae3758f975f"}, + {file = "pyzmq-26.0.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:b63731993cdddcc8e087c64e9cf003f909262b359110070183d7f3025d1c56b5"}, + {file = "pyzmq-26.0.3-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b3cd31f859b662ac5d7f4226ec7d8bd60384fa037fc02aee6ff0b53ba29a3ba8"}, + {file = "pyzmq-26.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:115f8359402fa527cf47708d6f8a0f8234f0e9ca0cab7c18c9c189c194dbf620"}, + {file = "pyzmq-26.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:715bdf952b9533ba13dfcf1f431a8f49e63cecc31d91d007bc1deb914f47d0e4"}, + {file = "pyzmq-26.0.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e1258c639e00bf5e8a522fec6c3eaa3e30cf1c23a2f21a586be7e04d50c9acab"}, + {file = "pyzmq-26.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:15c59e780be8f30a60816a9adab900c12a58d79c1ac742b4a8df044ab2a6d920"}, + {file = "pyzmq-26.0.3-cp39-cp39-win32.whl", hash = "sha256:d0cdde3c78d8ab5b46595054e5def32a755fc028685add5ddc7403e9f6de9879"}, + {file = "pyzmq-26.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:ce828058d482ef860746bf532822842e0ff484e27f540ef5c813d516dd8896d2"}, + {file = "pyzmq-26.0.3-cp39-cp39-win_arm64.whl", hash = "sha256:788f15721c64109cf720791714dc14afd0f449d63f3a5487724f024345067381"}, + {file = "pyzmq-26.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2c18645ef6294d99b256806e34653e86236eb266278c8ec8112622b61db255de"}, + {file = "pyzmq-26.0.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7e6bc96ebe49604df3ec2c6389cc3876cabe475e6bfc84ced1bf4e630662cb35"}, + {file = "pyzmq-26.0.3-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:971e8990c5cc4ddcff26e149398fc7b0f6a042306e82500f5e8db3b10ce69f84"}, + {file = "pyzmq-26.0.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d8416c23161abd94cc7da80c734ad7c9f5dbebdadfdaa77dad78244457448223"}, + {file = "pyzmq-26.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:082a2988364b60bb5de809373098361cf1dbb239623e39e46cb18bc035ed9c0c"}, + {file = "pyzmq-26.0.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d57dfbf9737763b3a60d26e6800e02e04284926329aee8fb01049635e957fe81"}, + {file = "pyzmq-26.0.3-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:77a85dca4c2430ac04dc2a2185c2deb3858a34fe7f403d0a946fa56970cf60a1"}, + {file = "pyzmq-26.0.3-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4c82a6d952a1d555bf4be42b6532927d2a5686dd3c3e280e5f63225ab47ac1f5"}, + {file = "pyzmq-26.0.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4496b1282c70c442809fc1b151977c3d967bfb33e4e17cedbf226d97de18f709"}, + {file = "pyzmq-26.0.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:e4946d6bdb7ba972dfda282f9127e5756d4f299028b1566d1245fa0d438847e6"}, + {file = "pyzmq-26.0.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:03c0ae165e700364b266876d712acb1ac02693acd920afa67da2ebb91a0b3c09"}, + {file = "pyzmq-26.0.3-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:3e3070e680f79887d60feeda051a58d0ac36622e1759f305a41059eff62c6da7"}, + {file = "pyzmq-26.0.3-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6ca08b840fe95d1c2bd9ab92dac5685f949fc6f9ae820ec16193e5ddf603c3b2"}, + {file = "pyzmq-26.0.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e76654e9dbfb835b3518f9938e565c7806976c07b37c33526b574cc1a1050480"}, + {file = "pyzmq-26.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:871587bdadd1075b112e697173e946a07d722459d20716ceb3d1bd6c64bd08ce"}, + {file = "pyzmq-26.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d0a2d1bd63a4ad79483049b26514e70fa618ce6115220da9efdff63688808b17"}, + {file = 
"pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0270b49b6847f0d106d64b5086e9ad5dc8a902413b5dbbb15d12b60f9c1747a4"}, + {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:703c60b9910488d3d0954ca585c34f541e506a091a41930e663a098d3b794c67"}, + {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74423631b6be371edfbf7eabb02ab995c2563fee60a80a30829176842e71722a"}, + {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4adfbb5451196842a88fda3612e2c0414134874bffb1c2ce83ab4242ec9e027d"}, + {file = "pyzmq-26.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3516119f4f9b8671083a70b6afaa0a070f5683e431ab3dc26e9215620d7ca1ad"}, + {file = "pyzmq-26.0.3.tar.gz", hash = "sha256:dba7d9f2e047dfa2bca3b01f4f84aa5246725203d6284e3790f2ca15fba6b40a"}, +] + +[package.dependencies] +cffi = {version = "*", markers = "implementation_name == \"pypy\""} + [[package]] name = "referencing" version = "0.35.1" @@ -5842,6 +6756,31 @@ files = [ decorator = ">=3.4.2" py = ">=1.4.26,<2.0.0" +[[package]] +name = "rfc3339-validator" +version = "0.1.4" +description = "A pure python RFC3339 validator" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa"}, + {file = "rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "rfc3986-validator" +version = "0.1.1" +description = "Pure python rfc3986 validator" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "rfc3986_validator-0.1.1-py2.py3-none-any.whl", hash = 
"sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9"}, + {file = "rfc3986_validator-0.1.1.tar.gz", hash = "sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055"}, +] + [[package]] name = "rich" version = "13.7.1" @@ -6256,6 +7195,22 @@ dev = ["flake8", "flit", "mypy", "pandas-stubs", "pre-commit", "pytest", "pytest docs = ["ipykernel", "nbconvert", "numpydoc", "pydata_sphinx_theme (==0.10.0rc2)", "pyyaml", "sphinx (<6.0.0)", "sphinx-copybutton", "sphinx-design", "sphinx-issues"] stats = ["scipy (>=1.7)", "statsmodels (>=0.12)"] +[[package]] +name = "send2trash" +version = "1.8.3" +description = "Send file to trash natively under Mac OS X, Windows and Linux" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "Send2Trash-1.8.3-py3-none-any.whl", hash = "sha256:0c31227e0bd08961c7665474a3d1ef7193929fedda4233843689baa056be46c9"}, + {file = "Send2Trash-1.8.3.tar.gz", hash = "sha256:b18e7a3966d99871aefeb00cfbcfdced55ce4871194810fc71f4aa484b953abf"}, +] + +[package.extras] +nativelib = ["pyobjc-framework-Cocoa", "pywin32"] +objc = ["pyobjc-framework-Cocoa"] +win32 = ["pywin32"] + [[package]] name = "sentence-transformers" version = "3.0.1" @@ -6495,6 +7450,25 @@ postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] pymysql = ["pymysql"] sqlcipher = ["sqlcipher3_binary"] +[[package]] +name = "stack-data" +version = "0.6.3" +description = "Extract data from python stack frames and tracebacks for informative displays" +optional = false +python-versions = "*" +files = [ + {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, + {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, +] + +[package.dependencies] +asttokens = ">=2.1.0" +executing = ">=1.2.0" +pure-eval = "*" + +[package.extras] +tests = ["cython", "littleutils", "pygments", 
"pytest", "typeguard"] + [[package]] name = "starlette" version = "0.37.2" @@ -6629,6 +7603,27 @@ files = [ [package.extras] tests = ["pytest", "pytest-cov"] +[[package]] +name = "terminado" +version = "0.18.1" +description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." +optional = false +python-versions = ">=3.8" +files = [ + {file = "terminado-0.18.1-py3-none-any.whl", hash = "sha256:a4468e1b37bb318f8a86514f65814e1afc977cf29b3992a4500d9dd305dcceb0"}, + {file = "terminado-0.18.1.tar.gz", hash = "sha256:de09f2c4b85de4765f7714688fff57d3e75bad1f909b589fde880460c753fd2e"}, +] + +[package.dependencies] +ptyprocess = {version = "*", markers = "os_name != \"nt\""} +pywinpty = {version = ">=1.1.0", markers = "os_name == \"nt\""} +tornado = ">=6.1.0" + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] +test = ["pre-commit", "pytest (>=7.0)", "pytest-timeout"] +typing = ["mypy (>=1.6,<2.0)", "traitlets (>=5.11.1)"] + [[package]] name = "text-generation" version = "0.7.0" @@ -6708,6 +7703,24 @@ requests = ">=2.26.0" [package.extras] blobfile = ["blobfile (>=2)"] +[[package]] +name = "tinycss2" +version = "1.3.0" +description = "A tiny CSS parser" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tinycss2-1.3.0-py3-none-any.whl", hash = "sha256:54a8dbdffb334d536851be0226030e9505965bb2f30f21a4a82c55fb2a80fae7"}, + {file = "tinycss2-1.3.0.tar.gz", hash = "sha256:152f9acabd296a8375fbca5b84c961ff95971fcfc32e79550c8df8e29118c54d"}, +] + +[package.dependencies] +webencodings = ">=0.4" + +[package.extras] +doc = ["sphinx", "sphinx_rtd_theme"] +test = ["pytest", "ruff"] + [[package]] name = "tokenizers" version = "0.19.1" @@ -6945,6 +7958,21 @@ notebook = ["ipywidgets (>=6)"] slack = ["slack-sdk"] telegram = ["requests"] +[[package]] +name = "traitlets" +version = "5.14.3" +description = "Traitlets Python configuration system" +optional = false +python-versions = ">=3.8" +files = [ + {file = 
"traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"}, + {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"}, +] + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] +test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.2)", "pytest-mock", "pytest-mypy-testing"] + [[package]] name = "transformers" version = "4.42.1" @@ -7053,6 +8081,17 @@ rich = ">=10.11.0" shellingham = ">=1.3.0" typing-extensions = ">=3.7.4.3" +[[package]] +name = "types-python-dateutil" +version = "2.9.0.20240316" +description = "Typing stubs for python-dateutil" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-python-dateutil-2.9.0.20240316.tar.gz", hash = "sha256:5d2f2e240b86905e40944dd787db6da9263f0deabef1076ddaed797351ec0202"}, + {file = "types_python_dateutil-2.9.0.20240316-py3-none-any.whl", hash = "sha256:6b8cb66d960771ce5ff974e9dd45e38facb81718cc1e208b10b1baccbfdbee3b"}, +] + [[package]] name = "types-toml" version = "0.10.8.20240310" @@ -7199,6 +8238,20 @@ files = [ {file = "ujson-5.10.0.tar.gz", hash = "sha256:b3cd8f3c5d8c7738257f1018880444f7b7d9b66232c64649f562d7ba86ad4bc1"}, ] +[[package]] +name = "uri-template" +version = "1.3.0" +description = "RFC 6570 URI Template Processor" +optional = false +python-versions = ">=3.7" +files = [ + {file = "uri-template-1.3.0.tar.gz", hash = "sha256:0e00f8eb65e18c7de20d595a14336e9f337ead580c70934141624b6d1ffdacc7"}, + {file = "uri_template-1.3.0-py3-none-any.whl", hash = "sha256:a44a133ea12d44a0c0f06d7d42a52d71282e77e2f937d8abd5655b8d56fc1363"}, +] + +[package.extras] +dev = ["flake8", "flake8-annotations", "flake8-bandit", "flake8-bugbear", "flake8-commas", "flake8-comprehensions", "flake8-continuation", "flake8-datetimez", "flake8-docstrings", "flake8-import-order", "flake8-literal", "flake8-modern-annotations", "flake8-noqa", 
"flake8-pyproject", "flake8-requirements", "flake8-typechecking-import", "flake8-use-fstring", "mypy", "pep8-naming", "types-PyYAML"] + [[package]] name = "uritemplate" version = "4.1.1" @@ -7447,6 +8500,43 @@ files = [ [package.dependencies] anyio = ">=3.0.0" +[[package]] +name = "wcwidth" +version = "0.2.13" +description = "Measures the displayed width of unicode strings in a terminal" +optional = false +python-versions = "*" +files = [ + {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, + {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, +] + +[[package]] +name = "webcolors" +version = "24.6.0" +description = "A library for working with the color formats defined by HTML and CSS." +optional = false +python-versions = ">=3.8" +files = [ + {file = "webcolors-24.6.0-py3-none-any.whl", hash = "sha256:8cf5bc7e28defd1d48b9e83d5fc30741328305a8195c29a8e668fa45586568a1"}, + {file = "webcolors-24.6.0.tar.gz", hash = "sha256:1d160d1de46b3e81e58d0a280d0c78b467dc80f47294b91b1ad8029d2cedb55b"}, +] + +[package.extras] +docs = ["furo", "sphinx", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-notfound-page", "sphinxext-opengraph"] +tests = ["coverage[toml]"] + +[[package]] +name = "webencodings" +version = "0.5.1" +description = "Character encoding aliases for legacy web content" +optional = false +python-versions = "*" +files = [ + {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, + {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, +] + [[package]] name = "websocket-client" version = "1.8.0" @@ -7971,4 +9061,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = 
"ff32944a61014fa12313857b4c6eb08b9998f895e57699c659656188fc36535a" +content-hash = "00cddc214467b3a3d01499bd70c867e10ea8330984b92bb5382703aea7860faa" diff --git a/pyproject.toml b/pyproject.toml index 879808fb1b..e3ce992c60 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,6 +6,7 @@ authors = ["OpenDevin"] license = "MIT" readme = "README.md" repository = "https://github.com/OpenDevin/OpenDevin" +include = ["poetry.lock"] [tool.poetry.dependencies] python = "^3.11" @@ -56,13 +57,6 @@ pytest = "*" pytest-cov = "*" pytest-asyncio = "*" pytest-forked = "*" -flake8 = "*" -openai = "*" -python-docx = "*" -PyPDF2 = "*" -pylatexenc = "*" -python-pptx = "*" -opencv-python = "*" pandas = "*" reportlab = "*" @@ -77,6 +71,18 @@ retry = "*" evaluate = "*" swebench = { git = "https://github.com/OpenDevin/SWE-bench.git" } + +[tool.poetry.group.runtime.dependencies] +jupyterlab = "*" +notebook = "*" +jupyter_kernel_gateway = "*" +flake8 = "*" +python-docx = "*" +PyPDF2 = "*" +python-pptx = "*" +pylatexenc = "*" +opencv-python = "*" + [build-system] build-backend = "poetry.core.masonry.api" requires = [ diff --git a/tests/unit/test_image_agnostic_util.py b/tests/unit/test_image_agnostic_util.py index 7655cd45a2..6ae531553a 100644 --- a/tests/unit/test_image_agnostic_util.py +++ b/tests/unit/test_image_agnostic_util.py @@ -2,14 +2,14 @@ from unittest.mock import MagicMock, patch from opendevin.runtime.utils.image_agnostic import ( _get_new_image_name, - generate_dockerfile_content, + generate_dockerfile, get_od_sandbox_image, ) -def test_generate_dockerfile_content(): +def test_generate_dockerfile(): base_image = 'debian:11' - dockerfile_content = generate_dockerfile_content(base_image) + dockerfile_content = generate_dockerfile(base_image) assert base_image in dockerfile_content assert ( 'RUN apt update && apt install -y openssh-server wget sudo' @@ -17,17 +17,18 @@ def test_generate_dockerfile_content(): ) -def test_get_new_image_name(): +def 
test_get_new_image_name_legacy(): + # test non-eventstream runtime (sandbox-based) base_image = 'debian:11' - new_image_name = _get_new_image_name(base_image) + new_image_name = _get_new_image_name(base_image, is_eventstream_runtime=False) assert new_image_name == 'od_sandbox:debian__11' base_image = 'ubuntu:22.04' - new_image_name = _get_new_image_name(base_image) + new_image_name = _get_new_image_name(base_image, is_eventstream_runtime=False) assert new_image_name == 'od_sandbox:ubuntu__22.04' base_image = 'ubuntu' - new_image_name = _get_new_image_name(base_image) + new_image_name = _get_new_image_name(base_image, is_eventstream_runtime=False) assert new_image_name == 'od_sandbox:ubuntu__latest' @@ -46,5 +47,11 @@ def test_get_od_sandbox_image(mock_docker_client, mock_build_sandbox_image): image_name = get_od_sandbox_image(base_image, mock_docker_client) assert image_name == 'od_sandbox:debian__11' mock_build_sandbox_image.assert_called_once_with( - base_image, 'od_sandbox:debian__11', mock_docker_client + base_image, + 'od_sandbox:debian__11', + mock_docker_client, + # eventstream runtime specific arguments, not used for sandbox-based runtime + # is_eventstream_runtime= + False, + skip_init=False, )