Skip to content

Commit 5275d8e

Browse files
authored
[AL-6133] Optimize and stabilize sdk integration tests part I (#1195)
1 parent 63cfb52 commit 5275d8e

File tree

8 files changed

+98
-32
lines changed

8 files changed

+98
-32
lines changed

tests/integration/test_client_errors.py

Lines changed: 11 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -77,7 +77,17 @@ def test_network_error(client):
7777
client.create_project(name="Project name")
7878

7979

80-
def test_invalid_attribute_error(client, rand_gen):
80+
@pytest.fixture
81+
def project_for_test_invalid_attribute_error(client):
82+
project = client.create_project(name="Project name")
83+
yield project
84+
project.delete()
85+
86+
87+
def test_invalid_attribute_error(client, rand_gen,
88+
project_for_test_invalid_attribute_error):
89+
project = project_for_test_invalid_attribute_error
90+
8191
# Creation
8292
with pytest.raises(labelbox.exceptions.InvalidAttributeError) as excinfo:
8393
client.create_project(name="Name", invalid_field="Whatever")
@@ -109,8 +119,6 @@ def test_invalid_attribute_error(client, rand_gen):
109119
assert excinfo.value.db_object_type == Project
110120
assert excinfo.value.field == {User.email}
111121

112-
project.delete()
113-
114122

115123
@pytest.mark.skip("timeouts cause failure before rate limit")
116124
def test_api_limit_error(client):

tests/integration/test_data_rows.py

Lines changed: 13 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -201,7 +201,8 @@ def test_data_row_bulk_creation(dataset, rand_gen, image_url):
201201
url = ({data_row.row_data for data_row in data_rows} - {image_url}).pop()
202202
assert requests.get(url).content == data
203203

204-
data_rows[0].delete()
204+
for dr in data_rows:
205+
dr.delete()
205206

206207

207208
@pytest.mark.slow
@@ -589,13 +590,23 @@ def test_data_row_filtering_sorting(dataset, image_url):
589590
assert row2.external_id == "row2"
590591

591592

592-
def test_data_row_deletion(dataset, image_url):
593+
@pytest.fixture
594+
def create_datarows_for_data_row_deletion(dataset, image_url):
593595
task = dataset.create_data_rows([{
594596
DataRow.row_data: image_url,
595597
DataRow.external_id: str(i)
596598
} for i in range(10)])
597599
task.wait_till_done()
598600

601+
data_rows = list(dataset.data_rows())
602+
603+
yield data_rows
604+
for dr in data_rows:
605+
dr.delete()
606+
607+
608+
def test_data_row_deletion(dataset, create_datarows_for_data_row_deletion):
609+
create_datarows_for_data_row_deletion
599610
data_rows = list(dataset.data_rows())
600611
expected = set(map(str, range(10)))
601612
assert {dr.external_id for dr in data_rows} == expected

tests/integration/test_dataset.py

Lines changed: 10 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -44,19 +44,26 @@ def test_dataset(client, rand_gen):
4444
dataset = client.get_dataset(dataset.uid)
4545

4646

47-
def test_dataset_filtering(client, rand_gen):
47+
@pytest.fixture
48+
def dataset_for_filtering(client, rand_gen):
4849
name_1 = rand_gen(str)
4950
name_2 = rand_gen(str)
5051
d1 = client.create_dataset(name=name_1)
5152
d2 = client.create_dataset(name=name_2)
5253

53-
assert list(client.get_datasets(where=Dataset.name == name_1)) == [d1]
54-
assert list(client.get_datasets(where=Dataset.name == name_2)) == [d2]
54+
yield name_1, d1, name_2, d2
5555

5656
d1.delete()
5757
d2.delete()
5858

5959

60+
def test_dataset_filtering(client, dataset_for_filtering):
61+
name_1, d1, name_2, d2 = dataset_for_filtering
62+
63+
assert list(client.get_datasets(where=Dataset.name == name_1)) == [d1]
64+
assert list(client.get_datasets(where=Dataset.name == name_2)) == [d2]
65+
66+
6067
def test_get_data_row_for_external_id(dataset, rand_gen, image_url):
6168
external_id = rand_gen(str)
6269

tests/integration/test_filtering.py

Lines changed: 16 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -5,16 +5,29 @@
55
from labelbox.schema.queue_mode import QueueMode
66

77

8-
# Avoid assertions using equality to prevent intermittent failures due to
9-
# other builds simultaneously adding projects to test org
10-
def test_where(client, image_url, rand_gen):
8+
@pytest.fixture
9+
def project_to_test_where(client, rand_gen):
1110
p_a_name = f"a-{rand_gen(str)}"
1211
p_b_name = f"b-{rand_gen(str)}"
1312
p_c_name = f"c-{rand_gen(str)}"
13+
1414
p_a = client.create_project(name=p_a_name, queue_mode=QueueMode.Batch)
1515
p_b = client.create_project(name=p_b_name, queue_mode=QueueMode.Batch)
1616
p_c = client.create_project(name=p_c_name, queue_mode=QueueMode.Batch)
1717

18+
yield p_a, p_b, p_c
19+
20+
p_a.delete()
21+
p_b.delete()
22+
p_c.delete()
23+
24+
25+
# Avoid assertions using equality to prevent intermittent failures due to
26+
# other builds simultaneously adding projects to test org
27+
def test_where(client, image_url, project_to_test_where, rand_gen):
28+
p_a, p_b, p_c = project_to_test_where
29+
p_a_name, p_b_name, p_c_name = [p.name for p in [p_a, p_b, p_c]]
30+
1831
def _get(f, where=None):
1932
date_where = Project.created_at >= p_a.created_at
2033
where = date_where if where is None else where & date_where
@@ -64,10 +77,6 @@ def get(where=None):
6477
assert {p_a.uid, p_b.uid}.issubset(le_b) and p_c.uid not in le_b
6578

6679
batch.delete()
67-
p_a.delete()
68-
p_b.delete()
69-
p_c.delete()
70-
dataset.delete()
7180

7281

7382
def test_unsupported_where(client):

tests/integration/test_labeling_parameter_overrides.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -66,5 +66,3 @@ def test_labeling_parameter_overrides(consensus_project, initial_dataset,
6666
project.set_labeling_parameter_overrides(data)
6767
assert str(exc_info.value) == \
6868
f"Priority must be greater than 0 for data_row {data_rows[2]}. Index: 0"
69-
70-
dataset.delete()

tests/integration/test_ontology.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,8 @@
55
import json
66
import time
77

8+
from labelbox.schema.queue_mode import QueueMode
9+
810

911
def test_feature_schema_is_not_archived(client, ontology):
1012
feature_schema_to_check = ontology.normalized['tools'][0]
@@ -85,6 +87,7 @@ def test_deletes_an_ontology(client):
8587

8688
def test_cant_delete_an_ontology_with_project(client):
8789
project = client.create_project(name="test project",
90+
queue_mode=QueueMode.Batch,
8891
media_type=MediaType.Image)
8992
tool = client.upsert_feature_schema(point.asdict())
9093
feature_schema_id = tool.normalized['featureSchemaId']
@@ -155,6 +158,7 @@ def test_does_not_include_used_ontologies(client):
155158
feature_schema_ids=[feature_schema_id],
156159
media_type=MediaType.Image)
157160
project = client.create_project(name="test project",
161+
queue_mode=QueueMode.Batch,
158162
media_type=MediaType.Image)
159163
project.setup_editor(ontology_with_project)
160164
unused_ontologies = client.get_unused_ontologies()

tests/integration/test_pagination.py

Lines changed: 22 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,30 @@
11
from copy import copy
2+
import time
23

4+
import pytest
35

4-
def test_get_one_and_many_dataset_order(client):
5-
paginator = client.get_datasets()
6+
from labelbox.schema.dataset import Dataset
7+
8+
9+
@pytest.fixture
10+
def data_for_dataset_order_test(client, rand_gen):
11+
name = rand_gen(str)
12+
dataset1 = client.create_dataset(name=name)
13+
dataset2 = client.create_dataset(name=name)
14+
15+
yield name
16+
17+
dataset1.delete()
18+
dataset2.delete()
19+
20+
21+
def test_get_one_and_many_dataset_order(client, data_for_dataset_order_test):
22+
name = data_for_dataset_order_test
23+
24+
paginator = client.get_datasets(where=Dataset.name == name)
625
# confirm get_one returns first dataset
726
all_datasets = list(paginator)
27+
assert len(all_datasets) == 2
828
get_one_dataset = copy(paginator).get_one()
929
assert get_one_dataset.uid == all_datasets[0].uid
1030

tests/integration/test_project.py

Lines changed: 22 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -213,7 +213,25 @@ def test_project_export_v2_datarow_list(
213213
]) == set(global_keys[:datarow_filter_size])
214214

215215

216-
def test_update_project_resource_tags(client, rand_gen):
216+
@pytest.fixture
217+
def data_for_project_test(client, rand_gen):
218+
projects = []
219+
220+
def _create_project(name: str = None):
221+
if name is None:
222+
name = rand_gen(str)
223+
project = client.create_project(name=name)
224+
projects.append(project)
225+
return project
226+
227+
yield _create_project
228+
229+
for project in projects:
230+
project.delete()
231+
232+
233+
def test_update_project_resource_tags(client, rand_gen, data_for_project_test):
234+
p1 = data_for_project_test()
217235

218236
def delete_tag(tag_id: str):
219237
"""Deletes a tag given the tag uid. Currently internal use only so this is not public"""
@@ -226,15 +244,9 @@ def delete_tag(tag_id: str):
226244
""", {"tag_id": tag_id})
227245
return res
228246

229-
before = list(client.get_projects())
230-
for o in before:
231-
assert isinstance(o, Project)
232-
233247
org = client.get_organization()
234248
assert org.uid is not None
235249

236-
project_name = rand_gen(str)
237-
p1 = client.create_project(name=project_name)
238250
assert p1.uid is not None
239251

240252
colorA = "#ffffff"
@@ -273,18 +285,15 @@ def delete_tag(tag_id: str):
273285
delete_tag(tagB.uid)
274286

275287

276-
def test_project_filtering(client, rand_gen):
288+
def test_project_filtering(client, rand_gen, data_for_project_test):
277289
name_1 = rand_gen(str)
290+
p1 = data_for_project_test(name_1)
278291
name_2 = rand_gen(str)
279-
p1 = client.create_project(name=name_1)
280-
p2 = client.create_project(name=name_2)
292+
p2 = data_for_project_test(name_2)
281293

282294
assert list(client.get_projects(where=Project.name == name_1)) == [p1]
283295
assert list(client.get_projects(where=Project.name == name_2)) == [p2]
284296

285-
p1.delete()
286-
p2.delete()
287-
288297

289298
def test_upsert_review_queue(project):
290299
project.upsert_review_queue(0.6)

0 commit comments

Comments (0)