|
9 | 9 | from pytest_httpserver import HTTPServer, URIPattern |
10 | 10 | from werkzeug.wrappers import Request, Response |
11 | 11 |
|
12 | | -from ollama._client import AsyncClient, Client, _copy_tools |
| 12 | +from ollama._client import CONNECTION_ERROR_MESSAGE, AsyncClient, Client, _copy_tools |
13 | 13 |
|
14 | 14 | PNG_BASE64 = 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAADElEQVR4nGNgYGAAAAAEAAH2FzhVAAAAAElFTkSuQmCC' |
15 | 15 | PNG_BYTES = base64.b64decode(PNG_BASE64) |
@@ -1112,3 +1112,30 @@ def test_tool_validation(): |
1112 | 1112 | with pytest.raises(ValidationError): |
1113 | 1113 | invalid_tool = {'type': 'invalid_type', 'function': {'name': 'test'}} |
1114 | 1114 | list(_copy_tools([invalid_tool])) |
| 1115 | + |
| 1116 | + |
def test_client_connection_error():
    """Each sync Client API call raises a helpful ConnectionError when the
    Ollama server is unreachable.

    The error message must match CONNECTION_ERROR_MESSAGE so users are told
    how to install/start Ollama.
    """
    # Nothing is expected to be listening on this port, so every request
    # below fails at the TCP-connect stage.
    client = Client('http://localhost:1234')

    # One assertion per public API entry point. (The original had the
    # `chat` assertion copy-pasted twice; the duplicate is removed.)
    with pytest.raises(ConnectionError, match=CONNECTION_ERROR_MESSAGE):
        client.chat('model', messages=[{'role': 'user', 'content': 'prompt'}])
    with pytest.raises(ConnectionError, match=CONNECTION_ERROR_MESSAGE):
        client.generate('model', 'prompt')
    with pytest.raises(ConnectionError, match=CONNECTION_ERROR_MESSAGE):
        client.show('model')
| 1128 | + |
| 1129 | + |
@pytest.mark.asyncio
async def test_async_client_connection_error():
    """Each AsyncClient API call raises a helpful ConnectionError when the
    Ollama server is unreachable.

    Mirrors test_client_connection_error: uses the imported
    CONNECTION_ERROR_MESSAGE constant via `pytest.raises(match=...)` instead
    of hard-coding the message string three times, so the test cannot drift
    from the library's actual message.
    """
    # Nothing is expected to be listening on this port, so every awaited
    # request below fails at the TCP-connect stage.
    client = AsyncClient('http://localhost:1234')

    with pytest.raises(ConnectionError, match=CONNECTION_ERROR_MESSAGE):
        await client.chat('model', messages=[{'role': 'user', 'content': 'prompt'}])
    with pytest.raises(ConnectionError, match=CONNECTION_ERROR_MESSAGE):
        await client.generate('model', 'prompt')
    with pytest.raises(ConnectionError, match=CONNECTION_ERROR_MESSAGE):
        await client.show('model')
0 commit comments