Skip to content

Commit 708314f

Browse files
committed
add tests
1 parent e145879 commit 708314f

File tree

2 files changed

+758
-0
lines changed

2 files changed

+758
-0
lines changed
Lines changed: 354 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,354 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

import pytest
from llama_stack_client import LlamaStackClient

from llama_stack import LlamaStackAsLibraryClient

class TestDynamicProviderManagement:
    """Integration tests for dynamic provider registration, update, and unregistration.

    Each test is self-cleaning: it removes any provider left over from a
    previous (possibly failed) run before registering, and unregisters
    everything it created before returning.
    """

    # Base URL of the Ollama instance assumed by the test setup; shared by
    # every inference-provider test below.
    OLLAMA_URL = "http://localhost:11434"

    @staticmethod
    def _try_unregister(client, provider_id: str) -> None:
        """Best-effort unregister of *provider_id*, swallowing all errors.

        Used to clear stale state from earlier runs; "provider not found"
        is the normal case here and must not fail the calling test.
        """
        try:
            client.providers.unregister(provider_id)
        except Exception:
            pass

    @staticmethod
    def _provider_ids(client) -> list[str]:
        """Return the IDs of all currently registered providers."""
        return [p.provider_id for p in client.providers.list()]

    def test_register_and_unregister_inference_provider(
        self, llama_stack_client: LlamaStackAsLibraryClient | LlamaStackClient
    ):
        """Test registering and unregistering an inference provider."""
        provider_id = "test-dynamic-inference"
        self._try_unregister(llama_stack_client, provider_id)

        # Register a new inference provider (using Ollama since it's
        # available in the test setup).
        response = llama_stack_client.providers.register(
            provider_id=provider_id,
            api="inference",
            provider_type="remote::ollama",
            config={
                "url": self.OLLAMA_URL,
                "api_token": "",
            },
        )

        # Verify registration.
        assert response.provider.provider_id == provider_id
        assert response.provider.api == "inference"
        assert response.provider.provider_type == "remote::ollama"
        assert response.provider.status in ["connected", "initializing"]

        # Verify the provider appears in the list and can be retrieved.
        assert provider_id in self._provider_ids(llama_stack_client)
        provider = llama_stack_client.providers.retrieve(provider_id)
        assert provider.provider_id == provider_id

        # Unregister and verify it is gone.
        llama_stack_client.providers.unregister(provider_id)
        assert provider_id not in self._provider_ids(llama_stack_client)

    def test_register_and_unregister_vector_store_provider(
        self, llama_stack_client: LlamaStackAsLibraryClient | LlamaStackClient
    ):
        """Test registering and unregistering a vector store provider."""
        provider_id = "test-dynamic-vector-store"
        self._try_unregister(llama_stack_client, provider_id)

        # Register a new vector_io provider (using Faiss inline).
        response = llama_stack_client.providers.register(
            provider_id=provider_id,
            api="vector_io",
            provider_type="inline::faiss",
            config={
                "embedding_dimension": 768,
                "kvstore": {
                    "type": "sqlite",
                    "namespace": f"test_vector_store_{provider_id}",
                },
            },
        )

        # Verify registration.
        assert response.provider.provider_id == provider_id
        assert response.provider.api == "vector_io"
        assert response.provider.provider_type == "inline::faiss"

        # Verify the provider appears in the list.
        assert provider_id in self._provider_ids(llama_stack_client)

        # Unregister and verify removal.
        llama_stack_client.providers.unregister(provider_id)
        assert provider_id not in self._provider_ids(llama_stack_client)

    def test_update_provider_config(self, llama_stack_client: LlamaStackAsLibraryClient | LlamaStackClient):
        """Test updating a provider's configuration."""
        provider_id = "test-update-config"
        self._try_unregister(llama_stack_client, provider_id)

        # Register a provider with an initial token.
        llama_stack_client.providers.register(
            provider_id=provider_id,
            api="inference",
            provider_type="remote::ollama",
            config={
                "url": self.OLLAMA_URL,
                "api_token": "old-token",
            },
        )

        # Update the configuration with a new token.
        response = llama_stack_client.providers.update(
            provider_id=provider_id,
            config={
                "url": self.OLLAMA_URL,
                "api_token": "new-token",
            },
        )

        # Verify the update took effect.
        assert response.provider.provider_id == provider_id
        assert response.provider.config["api_token"] == "new-token"

        # Clean up.
        llama_stack_client.providers.unregister(provider_id)

    def test_update_provider_attributes(self, llama_stack_client: LlamaStackAsLibraryClient | LlamaStackClient):
        """Test updating a provider's ABAC attributes."""
        provider_id = "test-update-attributes"
        self._try_unregister(llama_stack_client, provider_id)

        # Register provider with initial attributes.
        llama_stack_client.providers.register(
            provider_id=provider_id,
            api="inference",
            provider_type="remote::ollama",
            config={
                "url": self.OLLAMA_URL,
            },
            attributes={"team": ["team-a"]},
        )

        # Update attributes: extend "team" and add "environment".
        response = llama_stack_client.providers.update(
            provider_id=provider_id,
            attributes={"team": ["team-a", "team-b"], "environment": ["test"]},
        )

        # Verify attributes were updated.
        assert response.provider.attributes["team"] == ["team-a", "team-b"]
        assert response.provider.attributes["environment"] == ["test"]

        # Clean up.
        llama_stack_client.providers.unregister(provider_id)

    def test_test_provider_connection(self, llama_stack_client: LlamaStackAsLibraryClient | LlamaStackClient):
        """Test the connection testing functionality."""
        provider_id = "test-connection-check"
        self._try_unregister(llama_stack_client, provider_id)

        # Register provider.
        llama_stack_client.providers.register(
            provider_id=provider_id,
            api="inference",
            provider_type="remote::ollama",
            config={
                "url": self.OLLAMA_URL,
            },
        )

        # Test the connection.
        response = llama_stack_client.providers.test_connection(provider_id)

        # Verify response structure only: `success` may be True or False
        # depending on whether Ollama is actually running, but the API
        # itself must work and return the expected fields.
        assert hasattr(response, "success")
        assert hasattr(response, "health")

        # Clean up.
        llama_stack_client.providers.unregister(provider_id)

    def test_register_duplicate_provider_fails(self, llama_stack_client: LlamaStackAsLibraryClient | LlamaStackClient):
        """Test that registering a duplicate provider ID fails."""
        provider_id = "test-duplicate"
        self._try_unregister(llama_stack_client, provider_id)

        # Register first provider.
        llama_stack_client.providers.register(
            provider_id=provider_id,
            api="inference",
            provider_type="remote::ollama",
            config={"url": self.OLLAMA_URL},
        )

        # A second registration with the same ID must be rejected, even
        # though the config differs (deliberately different port).
        with pytest.raises(Exception) as exc_info:
            llama_stack_client.providers.register(
                provider_id=provider_id,
                api="inference",
                provider_type="remote::ollama",
                config={"url": "http://localhost:11435"},
            )

        # Verify the error message mentions the provider already exists.
        assert "already exists" in str(exc_info.value).lower() or "duplicate" in str(exc_info.value).lower()

        # Clean up.
        llama_stack_client.providers.unregister(provider_id)

    def test_unregister_nonexistent_provider_fails(
        self, llama_stack_client: LlamaStackAsLibraryClient | LlamaStackClient
    ):
        """Test that unregistering a non-existent provider fails."""
        with pytest.raises(Exception) as exc_info:
            llama_stack_client.providers.unregister("nonexistent-provider-12345")

        # Verify the error message mentions the provider was not found.
        assert "not found" in str(exc_info.value).lower() or "does not exist" in str(exc_info.value).lower()

    def test_update_nonexistent_provider_fails(self, llama_stack_client: LlamaStackAsLibraryClient | LlamaStackClient):
        """Test that updating a non-existent provider fails."""
        with pytest.raises(Exception) as exc_info:
            llama_stack_client.providers.update(
                provider_id="nonexistent-provider-12345",
                config={"url": self.OLLAMA_URL},
            )

        # Verify the error message mentions the provider was not found.
        assert "not found" in str(exc_info.value).lower() or "does not exist" in str(exc_info.value).lower()

    def test_provider_lifecycle_with_inference(
        self, llama_stack_client: LlamaStackAsLibraryClient | LlamaStackClient
    ):
        """Test full lifecycle: register, test connection, update, unregister."""
        provider_id = "test-lifecycle-inference"
        self._try_unregister(llama_stack_client, provider_id)

        # Register provider.
        response = llama_stack_client.providers.register(
            provider_id=provider_id,
            api="inference",
            provider_type="remote::ollama",
            config={
                "url": self.OLLAMA_URL,
            },
        )
        assert response.provider.status in ["connected", "initializing"]

        # Test connection (shape only; Ollama may or may not be running).
        conn_test = llama_stack_client.providers.test_connection(provider_id)
        assert hasattr(conn_test, "success")

        # Update configuration.
        update_response = llama_stack_client.providers.update(
            provider_id=provider_id,
            config={
                "url": self.OLLAMA_URL,
                "api_token": "updated-token",
            },
        )
        assert update_response.provider.config["api_token"] == "updated-token"

        # Unregister and verify it's gone.
        llama_stack_client.providers.unregister(provider_id)
        assert provider_id not in self._provider_ids(llama_stack_client)

    def test_multiple_providers_same_type(self, llama_stack_client: LlamaStackAsLibraryClient | LlamaStackClient):
        """Test registering multiple providers of the same type with different IDs."""
        provider_id_1 = "test-multi-ollama-1"
        provider_id_2 = "test-multi-ollama-2"

        # Clean up leftovers from previous runs.
        for pid in [provider_id_1, provider_id_2]:
            self._try_unregister(llama_stack_client, pid)

        # Register first provider.
        response1 = llama_stack_client.providers.register(
            provider_id=provider_id_1,
            api="inference",
            provider_type="remote::ollama",
            config={"url": self.OLLAMA_URL},
        )
        assert response1.provider.provider_id == provider_id_1

        # Register second provider with the same type but a different ID.
        response2 = llama_stack_client.providers.register(
            provider_id=provider_id_2,
            api="inference",
            provider_type="remote::ollama",
            config={"url": self.OLLAMA_URL},
        )
        assert response2.provider.provider_id == provider_id_2

        # Verify both are in the list.
        current_ids = self._provider_ids(llama_stack_client)
        assert provider_id_1 in current_ids
        assert provider_id_2 in current_ids

        # Clean up both and verify removal.
        llama_stack_client.providers.unregister(provider_id_1)
        llama_stack_client.providers.unregister(provider_id_2)
        current_ids = self._provider_ids(llama_stack_client)
        assert provider_id_1 not in current_ids
        assert provider_id_2 not in current_ids

0 commit comments

Comments
 (0)