Files
autogen/python/packages/autogen-ext/pyproject.toml
PythicCoder 6a3acc4548 Feature add Add LlamaCppChatCompletionClient and llama-cpp (#5326)
This pull request introduces the integration of the `llama-cpp` library
into the `autogen-ext` package, with significant changes to the project
dependencies and the implementation of a new chat completion client. The
most important changes include updating the project dependencies, adding
a new module for the `LlamaCppChatCompletionClient`, and implementing
the client with various functionalities.

### Project Dependencies:

*
[`python/packages/autogen-ext/pyproject.toml`](diffhunk://#diff-095119d4420ff09059557bd25681211d1772c2be0fbe0ff2d551a3726eff1b4bR34-R38):
Added `llama-cpp-python` as a new dependency under the `llama-cpp`
section.

### New Module:

*
[`python/packages/autogen-ext/src/autogen_ext/models/llama_cpp/__init__.py`](diffhunk://#diff-42ae3ba17d51ca917634c4ea3c5969cf930297c288a783f8d9c126f2accef71dR1-R8):
Introduced the `LlamaCppChatCompletionClient` class and handled import
errors with a descriptive message for missing dependencies.

### Implementation of `LlamaCppChatCompletionClient`:

*
`python/packages/autogen-ext/src/autogen_ext/models/llama_cpp/_llama_cpp_completion_client.py`:
- Added the `LlamaCppChatCompletionClient` class with methods to
initialize the client, create chat completions, detect and execute
tools, and handle streaming responses.
- Included detailed logging for debugging purposes and implemented
methods to count tokens, track usage, and provide model information and
chat capabilities.

<!-- Thank you for your contribution! Please review
https://microsoft.github.io/autogen/docs/Contribute before opening a
pull request. -->

<!-- Please add a reviewer to the assignee section when you create a PR.
If you don't have the access to it, we will shortly find a reviewer and
assign them to your PR. -->

## Why are these changes needed?

<!-- Please give a short summary of the change and the problem this
solves. -->

## Related issue number

<!-- For example: "Closes #1234" -->

## Checks

- [x] I've included any doc changes needed for
https://microsoft.github.io/autogen/. See
https://microsoft.github.io/autogen/docs/Contribute#documentation to
build and test documentation locally.
- [x] I've added tests (if relevant) corresponding to the changes
introduced in this PR.
- [x] I've made sure all auto checks have passed.

---------

Co-authored-by: aribornstein <x@x.com>
Co-authored-by: Eric Zhu <ekzhu@users.noreply.github.com>
Co-authored-by: Ryan Sweet <rysweet@microsoft.com>
2025-03-10 16:53:53 -07:00

183 lines
3.7 KiB
TOML

[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"
[project]
name = "autogen-ext"
version = "0.4.8"
license = {file = "LICENSE-CODE"}
description = "AutoGen extensions library"
readme = "README.md"
requires-python = ">=3.10"
classifiers = [
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
]
dependencies = [
"autogen-core==0.4.8",
]
[project.optional-dependencies]
langchain = ["langchain_core~=0.3.3"]
azure = [
"azure-ai-inference>=1.0.0b7",
"azure-core",
"azure-identity",
]
docker = ["docker~=7.0", "asyncio_atexit>=1.0.1"]
ollama = ["ollama>=0.4.7", "tiktoken>=0.8.0"]
openai = ["openai>=1.52.2", "tiktoken>=0.8.0", "aiofiles"]
file-surfer = [
"autogen-agentchat==0.4.8",
"markitdown~=0.0.1",
]
llama-cpp = [
"llama-cpp-python>=0.1.9",
]
graphrag = ["graphrag>=1.0.1"]
chromadb = ["chromadb"]
web-surfer = [
"autogen-agentchat==0.4.8",
"playwright>=1.48.0",
"pillow>=11.0.0",
"markitdown~=0.0.1",
]
magentic-one = [
"autogen-agentchat==0.4.8",
"markitdown~=0.0.1",
"playwright>=1.48.0",
"pillow>=11.0.0",
]
video-surfer = [
"autogen-agentchat==0.4.8",
"opencv-python>=4.5",
"ffmpeg-python",
"openai-whisper",
]
diskcache = [
"diskcache>=5.6.3"
]
redis = [
"redis>=5.2.1"
]
grpc = [
"grpcio~=1.70.0",
]
jupyter-executor = [
"ipykernel>=6.29.5",
"nbclient>=0.10.2",
]
task-centric-memory = ["chromadb>=0.6.3"]
semantic-kernel-core = [
"semantic-kernel>=1.17.1",
]
gemini = [
"google-genai>=1.0.0",
]
semantic-kernel-google = [
"semantic-kernel[google]>=1.17.1",
]
semantic-kernel-hugging-face = [
"semantic-kernel[hugging_face]>=1.17.1",
]
semantic-kernel-mistralai = [
"semantic-kernel[mistralai]>=1.17.1",
]
semantic-kernel-ollama = [
"semantic-kernel[ollama]>=1.17.1",
]
semantic-kernel-onnx = [
"semantic-kernel[onnx]>=1.17.1",
]
semantic-kernel-anthropic = [
"semantic-kernel[anthropic]>=1.17.1",
]
semantic-kernel-pandas = [
"semantic-kernel[pandas]>=1.17.1",
]
semantic-kernel-aws = [
"semantic-kernel[aws]>=1.17.1",
]
semantic-kernel-dapr = [
"semantic-kernel[dapr]>=1.17.1",
]
http-tool = [
"httpx>=0.27.0",
"json-schema-to-pydantic>=0.2.0"
]
semantic-kernel-all = [
"semantic-kernel[google,hugging_face,mistralai,ollama,onnx,anthropic,usearch,pandas,aws,dapr]>=1.17.1",
]
rich = ["rich>=13.9.4"]
mcp = [
"mcp>=1.1.3",
"json-schema-to-pydantic>=0.2.2"
]
[tool.hatch.build.targets.wheel]
packages = ["src/autogen_ext"]
[dependency-groups]
dev = [
"autogen_test_utils",
"langchain-experimental",
"pandas-stubs>=2.2.3.241126",
"httpx>=0.28.1",
]
[tool.ruff]
extend = "../../pyproject.toml"
include = ["src/**", "tests/*.py"]
exclude = ["src/autogen_ext/agents/web_surfer/*.js", "src/autogen_ext/runtimes/grpc/protos", "tests/protos", "README.md"]
[tool.pyright]
extends = "../../pyproject.toml"
include = ["src", "tests"]
exclude = ["src/autogen_ext/runtimes/grpc/protos", "tests/protos"]
[tool.pytest.ini_options]
minversion = "6.0"
testpaths = ["tests"]
markers = [
"grpc",
]
[tool.poe]
include = "../../shared_tasks.toml"
[tool.poe.tasks]
test.sequence = [
"playwright install",
"pytest -n 1 --cov=src --cov-report=term-missing --cov-report=xml",
]
test.default_item_type = "cmd"
test-grpc = "pytest -n 1 --cov=src --cov-report=term-missing --cov-report=xml --grpc"
test-windows = "pytest -n 1 --cov=src --cov-report=term-missing --cov-report=xml -m 'windows'"
mypy = "mypy --config-file ../../pyproject.toml --exclude src/autogen_ext/runtimes/grpc/protos --exclude tests/protos src tests"
[tool.mypy]
[[tool.mypy.overrides]]
module = "docker.*"
ignore_missing_imports = true