Fix integ tests.

This commit is contained in:
Goran Peretin
2024-05-25 08:54:27 +00:00
committed by Senko Rašić
parent 995414d615
commit 6e7de30434
3 changed files with 11 additions and 14 deletions

View File

@@ -5,6 +5,7 @@ import pytest
from core.config import LLMConfig, LLMProvider from core.config import LLMConfig, LLMProvider
from core.llm.anthropic_client import AnthropicClient from core.llm.anthropic_client import AnthropicClient
from core.llm.base import APIError
from core.llm.convo import Convo from core.llm.convo import Convo
from core.llm.request_log import LLMRequestStatus from core.llm.request_log import LLMRequestStatus
@@ -34,7 +35,7 @@ async def test_incorrect_key():
llm = AnthropicClient(cfg, stream_handler=print_handler) llm = AnthropicClient(cfg, stream_handler=print_handler)
convo = Convo("you're a friendly assistant").user("tell me joke") convo = Convo("you're a friendly assistant").user("tell me joke")
-with pytest.raises(ValueError, match="invalid x-api-key"):
+with pytest.raises(APIError, match="invalid x-api-key"):
await llm(convo) await llm(convo)
@@ -49,7 +50,7 @@ async def test_unknown_model():
llm = AnthropicClient(cfg) llm = AnthropicClient(cfg)
convo = Convo("you're a friendly assistant").user("tell me joke") convo = Convo("you're a friendly assistant").user("tell me joke")
-with pytest.raises(ValueError, match="model: gpt-3.6-nonexistent"):
+with pytest.raises(APIError, match="model: gpt-3.6-nonexistent"):
await llm(convo) await llm(convo)

View File

@@ -4,6 +4,7 @@ from os import getenv
import pytest import pytest
from core.config import LLMConfig, LLMProvider from core.config import LLMConfig, LLMProvider
from core.llm.base import APIError
from core.llm.convo import Convo from core.llm.convo import Convo
from core.llm.groq_client import GroqClient from core.llm.groq_client import GroqClient
@@ -33,7 +34,7 @@ async def test_incorrect_key():
llm = GroqClient(cfg, stream_handler=print_handler) llm = GroqClient(cfg, stream_handler=print_handler)
convo = Convo("you're a friendly assistant").user("tell me joke") convo = Convo("you're a friendly assistant").user("tell me joke")
-with pytest.raises(ValueError, match="Invalid API Key"):
+with pytest.raises(APIError, match="Invalid API Key"):
await llm(convo) await llm(convo)
@@ -48,7 +49,7 @@ async def test_unknown_model():
llm = GroqClient(cfg) llm = GroqClient(cfg)
convo = Convo("you're a friendly assistant").user("tell me joke") convo = Convo("you're a friendly assistant").user("tell me joke")
-with pytest.raises(ValueError, match="does not exist"):
+with pytest.raises(APIError, match="does not exist"):
await llm(convo) await llm(convo)
@@ -117,8 +118,5 @@ async def test_context_too_large():
large_convo = " ".join(["lorem ipsum dolor sit amet"] * 30000) large_convo = " ".join(["lorem ipsum dolor sit amet"] * 30000)
convo = Convo(large_convo) convo = Convo(large_convo)
-with pytest.raises(ValueError, match="Context limit exceeded."):
+with pytest.raises(APIError, match="We sent too large request to the LLM"):
await llm(convo) await llm(convo)
streamed = "".join(streamed_response)
assert "We sent too large request to the LLM" in streamed

View File

@@ -4,6 +4,7 @@ from os import getenv
import pytest import pytest
from core.config import LLMConfig, LLMProvider from core.config import LLMConfig, LLMProvider
from core.llm.base import APIError
from core.llm.convo import Convo from core.llm.convo import Convo
from core.llm.openai_client import OpenAIClient from core.llm.openai_client import OpenAIClient
@@ -33,7 +34,7 @@ async def test_incorrect_key():
llm = OpenAIClient(cfg, stream_handler=print_handler) llm = OpenAIClient(cfg, stream_handler=print_handler)
convo = Convo("you're a friendly assistant").user("tell me joke") convo = Convo("you're a friendly assistant").user("tell me joke")
-with pytest.raises(ValueError, match="Incorrect API key provided: sk-inc"):
+with pytest.raises(APIError, match="Incorrect API key provided: sk-inc"):
await llm(convo) await llm(convo)
@@ -48,7 +49,7 @@ async def test_unknown_model():
llm = OpenAIClient(cfg) llm = OpenAIClient(cfg)
convo = Convo("you're a friendly assistant").user("tell me joke") convo = Convo("you're a friendly assistant").user("tell me joke")
-with pytest.raises(ValueError, match="does not exist"):
+with pytest.raises(APIError, match="does not exist"):
await llm(convo) await llm(convo)
@@ -114,8 +115,5 @@ async def test_context_too_large():
convo = Convo("you're a friendly assistant") convo = Convo("you're a friendly assistant")
large_convo = " ".join(["lorem ipsum dolor sit amet"] * 30000) large_convo = " ".join(["lorem ipsum dolor sit amet"] * 30000)
convo.user(large_convo) convo.user(large_convo)
-with pytest.raises(ValueError, match="Context limit exceeded."):
+with pytest.raises(APIError, match="We sent too large request to the LLM"):
await llm(convo) await llm(convo)
streamed = "".join(streamed_response)
assert "We sent too large request to the LLM" in streamed