Commit 9d9fbfc

Remove is_adv_enabled fixture (#1331)
1 parent 480a85c commit 9d9fbfc
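
This commit removes the session-scoped is_adv_enabled pytest fixture and, in each test that consumed it, drops the non-ADV branch of the if is_adv_enabled / else blocks so the ADV behaviour is asserted unconditionally. For reference, a minimal, self-contained sketch of the removed fixture, reproduced from the conftest.py diff below (client is the test suite's existing client fixture):

import pytest


@pytest.fixture(scope="session")
def is_adv_enabled(client) -> bool:
    # One GraphQL round trip per test session: read the user's isAdvEnabled flag.
    query_str = "query IsAdvEnabledPyApi { user { isAdvEnabled } }"
    response = client.execute(query_str)
    return bool(response['user']['isAdvEnabled'])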

File tree

4 files changed (+54, -137 lines)

tests/integration/conftest.py

Lines changed: 0 additions & 7 deletions
@@ -750,13 +750,6 @@ def export_v2_test_helpers() -> Type[ExportV2Helpers]:
     return ExportV2Helpers()


-@pytest.fixture(scope="session")
-def is_adv_enabled(client) -> bool:
-    query_str = "query IsAdvEnabledPyApi { user { isAdvEnabled } }"
-    response = client.execute(query_str)
-    return bool(response['user']['isAdvEnabled'])
-
-
 IMAGE_URL = "https://storage.googleapis.com/diagnostics-demo-data/coco/COCO_train2014_000000000034.jpg"
 EXTERNAL_ID = "my-image"

tests/integration/test_data_row_metadata.py

Lines changed: 10 additions & 15 deletions
@@ -227,7 +227,7 @@ def test_bulk_partial_delete_datarow_metadata(data_row, mdo):
     assert len(fields) == (len(metadata.fields) - 1)


-def test_large_bulk_delete_datarow_metadata(big_dataset, mdo, is_adv_enabled):
+def test_large_bulk_delete_datarow_metadata(big_dataset, mdo):
     metadata = []
     data_row_ids = [dr.uid for dr in big_dataset.data_rows()]
     for data_row_id in data_row_ids:

@@ -249,13 +249,11 @@ def test_large_bulk_delete_datarow_metadata(big_dataset, mdo, is_adv_enabled):
                 data_row_id=data_row_id,
                 fields=[SPLIT_SCHEMA_ID, CAPTURE_DT_SCHEMA_ID]))
     errors = mdo.bulk_delete(deletes)
-    if is_adv_enabled:
-        assert len(errors) == len(data_row_ids)
-        for error in errors:
-            assert error.fields == [CAPTURE_DT_SCHEMA_ID]
-            assert error.error == 'Schema did not exist'
-    else:
-        assert len(errors) == 0
+
+    assert len(errors) == len(data_row_ids)
+    for error in errors:
+        assert error.fields == [CAPTURE_DT_SCHEMA_ID]
+        assert error.error == 'Schema did not exist'

     for data_row_id in data_row_ids:
         fields = [f for f in mdo.bulk_export([data_row_id])[0].fields]

@@ -308,17 +306,14 @@ def test_upsert_non_existent_schema_id(data_row, mdo):
     mdo.bulk_upsert([metadata])


-def test_delete_non_existent_schema_id(data_row, mdo, is_adv_enabled):
+def test_delete_non_existent_schema_id(data_row, mdo):
     res = mdo.bulk_delete([
         DeleteDataRowMetadata(data_row_id=data_row.uid,
                               fields=[SPLIT_SCHEMA_ID])
     ])
-    if is_adv_enabled:
-        assert len(res) == 1
-        assert res[0].fields == [SPLIT_SCHEMA_ID]
-        assert res[0].error == 'Schema did not exist'
-    else:
-        assert len(res) == 0
+    assert len(res) == 1
+    assert res[0].fields == [SPLIT_SCHEMA_ID]
+    assert res[0].error == 'Schema did not exist'


 def test_parse_raw_metadata(mdo):
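
For orientation, a minimal sketch of the bulk-delete error behaviour these tests now assert unconditionally. It reuses the mdo and data_row fixtures and the SPLIT_SCHEMA_ID constant from the tests above, and assumes DeleteDataRowMetadata is importable from labelbox.schema.data_row_metadata like the other metadata classes; it is illustrative, not part of the commit:

from labelbox.schema.data_row_metadata import DeleteDataRowMetadata

# Deleting a schema id that is not set on the data row now always yields one
# error entry per delete request instead of an empty error list.
res = mdo.bulk_delete([
    DeleteDataRowMetadata(data_row_id=data_row.uid, fields=[SPLIT_SCHEMA_ID])
])
assert len(res) == 1
assert res[0].fields == [SPLIT_SCHEMA_ID]
assert res[0].error == 'Schema did not exist'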

tests/integration/test_data_rows.py

Lines changed: 38 additions & 98 deletions
@@ -10,11 +10,9 @@

 from labelbox import DataRow
 from labelbox.exceptions import MalformedQueryException
-from labelbox.schema.export_filters import DatarowExportFilters
 from labelbox.schema.task import Task
 from labelbox.schema.data_row_metadata import DataRowMetadataField, DataRowMetadataKind
 import labelbox.exceptions
-from utils import INTEGRATION_SNAPSHOT_DIRECTORY

 SPLIT_SCHEMA_ID = "cko8sbczn0002h2dkdaxb5kal"
 TEST_SPLIT_ID = "cko8scbz70005h2dkastwhgqt"

@@ -484,8 +482,7 @@ def create_data_row(data_rows):
                                         CUSTOM_TEXT_SCHEMA_NAME].uid


-def test_create_data_rows_with_invalid_metadata(dataset, image_url,
-                                                is_adv_enabled):
+def test_create_data_rows_with_invalid_metadata(dataset, image_url):
     fields = make_metadata_fields()
     # make the payload invalid by providing the same schema id more than once
     fields.append(

@@ -496,14 +493,11 @@ def test_create_data_rows_with_invalid_metadata(dataset, image_url,
         DataRow.metadata_fields: fields
     }])
     task.wait_till_done(timeout_seconds=60)
-    if is_adv_enabled:
-        assert task.status == "COMPLETE"
-        assert len(task.failed_data_rows) == 1
-        assert f"A schemaId can only be specified once per DataRow : [{TEXT_SCHEMA_ID}]" in task.failed_data_rows[
-            0]["message"]
-    else:
-        assert task.status == "FAILED"
-        assert len(task.failed_data_rows) > 0
+
+    assert task.status == "COMPLETE"
+    assert len(task.failed_data_rows) == 1
+    assert f"A schemaId can only be specified once per DataRow : [{TEXT_SCHEMA_ID}]" in task.failed_data_rows[
+        0]["message"]


 def test_create_data_rows_with_metadata_missing_value(dataset, image_url):

@@ -815,7 +809,7 @@ def test_data_row_bulk_creation_with_unique_global_keys(dataset, sample_image):


 def test_data_row_bulk_creation_with_same_global_keys(dataset, sample_image,
-                                                       snapshot, is_adv_enabled):
+                                                       snapshot):
     global_key_1 = str(uuid.uuid4())
     task = dataset.create_data_rows([{
         DataRow.row_data: sample_image,

@@ -826,48 +820,22 @@ def test_data_row_bulk_creation_with_same_global_keys(dataset, sample_image,
     }])

     task.wait_till_done()
-    if is_adv_enabled:
-        assert task.status == "COMPLETE"
-        assert type(task.failed_data_rows) is list
-        assert len(task.failed_data_rows) == 1
-        assert type(task.created_data_rows) is list
-        assert len(task.created_data_rows) == 1
-        assert task.failed_data_rows[0][
-            'message'] == f"Duplicate global key: '{global_key_1}'"
-        assert task.failed_data_rows[0]['failedDataRows'][0][
-            'externalId'] == sample_image
-        assert task.created_data_rows[0]['externalId'] == sample_image
-        assert task.created_data_rows[0]['globalKey'] == global_key_1
-    else:
-        assert task.status == "FAILED"
-        assert len(task.failed_data_rows) > 0
-        assert len(list(dataset.data_rows())) == 0
-        assert task.errors == "Data rows contain duplicate global keys"
-
-        # Dynamic values, resetting to make snapshot
-        task.failed_data_rows[0]['failedDataRows'][0]['rowData'] = ''
-        task.failed_data_rows[0]['failedDataRows'][1]['rowData'] = ''
-        task.failed_data_rows[0]['failedDataRows'][0]['globalKey'] = ''
-        task.failed_data_rows[0]['failedDataRows'][1]['globalKey'] = ''
-        snapshot.snapshot_dir = INTEGRATION_SNAPSHOT_DIRECTORY
-        snapshot.assert_match(
-            json.dumps(task.failed_data_rows),
-            'test_data_rows.test_data_row_bulk_creation_with_same_global_keys.failed_data_rows.json'
-        )
-
-        task = dataset.create_data_rows([{
-            DataRow.row_data: sample_image,
-            DataRow.global_key: global_key_1
-        }])

-        task.wait_till_done()
-        assert task.status == "COMPLETE"
-        assert len(list(dataset.data_rows())) == 1
-        assert list(dataset.data_rows())[0].global_key == global_key_1
+    assert task.status == "COMPLETE"
+    assert type(task.failed_data_rows) is list
+    assert len(task.failed_data_rows) == 1
+    assert type(task.created_data_rows) is list
+    assert len(task.created_data_rows) == 1
+    assert task.failed_data_rows[0][
+        'message'] == f"Duplicate global key: '{global_key_1}'"
+    assert task.failed_data_rows[0]['failedDataRows'][0][
+        'externalId'] == sample_image
+    assert task.created_data_rows[0]['externalId'] == sample_image
+    assert task.created_data_rows[0]['globalKey'] == global_key_1


 def test_data_row_delete_and_create_with_same_global_key(
-        client, dataset, sample_image, is_adv_enabled):
+        client, dataset, sample_image):
     global_key_1 = str(uuid.uuid4())
     data_row_payload = {
         DataRow.row_data: sample_image,

@@ -887,15 +855,10 @@ def test_data_row_delete_and_create_with_same_global_key(
     task = dataset.create_data_rows([data_row_payload])
     task.wait_till_done()

-    if is_adv_enabled:
-        assert task.status == "COMPLETE"
-        assert len(task.failed_data_rows) == 1
-        assert task.failed_data_rows[0][
-            'message'] == f"Duplicate global key: '{global_key_1}'"
-    else:
-        assert task.status == "FAILED"
-        assert len(task.failed_data_rows) > 0
-        assert task.errors.startswith("Duplicate global keys found")
+    assert task.status == "COMPLETE"
+    assert len(task.failed_data_rows) == 1
+    assert task.failed_data_rows[0][
+        'message'] == f"Duplicate global key: '{global_key_1}'"

     # delete datarow
     client.get_data_row(new_data_row_id).delete()

@@ -934,7 +897,7 @@ def test_data_row_bulk_creation_sync_with_unique_global_keys(


 def test_data_row_bulk_creation_sync_with_same_global_keys(
-        dataset, sample_image, is_adv_enabled):
+        dataset, sample_image):
     global_key_1 = str(uuid.uuid4())

     with pytest.raises(labelbox.exceptions.MalformedQueryException) as exc_info:

@@ -946,22 +909,10 @@ def test_data_row_bulk_creation_sync_with_same_global_keys(
             DataRow.global_key: global_key_1
         }])

-    if is_adv_enabled:
-        # ADV will import the first data row but not the second (duplicate global key)
-        assert len(list(dataset.data_rows())) == 1
-        assert list(dataset.data_rows())[0].global_key == global_key_1
-        assert "Some data rows were not imported. Check error output here" in str(
-            exc_info.value)
-    else:
-        assert len(list(dataset.data_rows())) == 0
-
-        dataset.create_data_rows_sync([{
-            DataRow.row_data: sample_image,
-            DataRow.global_key: global_key_1
-        }])
-
-        assert len(list(dataset.data_rows())) == 1
-        assert list(dataset.data_rows())[0].global_key == global_key_1
+    assert len(list(dataset.data_rows())) == 1
+    assert list(dataset.data_rows())[0].global_key == global_key_1
+    assert "Some data rows were not imported. Check error output here" in str(
+        exc_info.value)


 @pytest.fixture

@@ -995,27 +946,19 @@ def test_create_conversational_text(converstational_data_rows,
         data_row.row_data).json() == conversational_content['row_data']


-def test_invalid_media_type(dataset, conversational_content, is_adv_enabled):
-    for error_message, invalid_media_type in [[
-            "Found invalid contents for media type: 'IMAGE'", 'IMAGE'
-    ], ["Found invalid media type: 'totallyinvalid'", 'totallyinvalid']]:
+def test_invalid_media_type(dataset, conversational_content):
+    for _, __ in [["Found invalid contents for media type: 'IMAGE'", 'IMAGE'],
+                  [
+                      "Found invalid media type: 'totallyinvalid'",
+                      'totallyinvalid'
+                  ]]:
         # TODO: What error kind should this be? It looks like for global key we are
         # using malformed query. But for invalid contents in FileUploads we use InvalidQueryError
         with pytest.raises(labelbox.exceptions.InvalidQueryError):
             dataset.create_data_rows_sync([{
                 **conversational_content, 'media_type': 'IMAGE'
             }])

-        if is_adv_enabled:
-            # ADV does not take media type hint into account for async import requests
-            continue
-
-        task = dataset.create_data_rows([{
-            **conversational_content, 'media_type': invalid_media_type
-        }])
-        task.wait_till_done()
-        assert task.errors == {'message': error_message}
-

 def test_create_tiled_layer(dataset, tile_content):
     examples = [

@@ -1044,15 +987,12 @@ def test_create_data_row_with_attachments(dataset):
     assert len(attachments) == 1


-def test_create_data_row_with_media_type(dataset, image_url, is_adv_enabled):
+def test_create_data_row_with_media_type(dataset, image_url):
     with pytest.raises(labelbox.exceptions.InvalidQueryError) as exc:
         dr = dataset.create_data_row(
             row_data={'invalid_object': 'invalid_value'}, media_type="IMAGE")
-    if is_adv_enabled:
-        assert "Media type validation failed, expected: 'image/*', was: application/json" in str(
-            exc.value)
-    else:
-        assert "Found invalid contents for media type: \'IMAGE\'" in str(
-            exc.value)
+
+    assert "Media type validation failed, expected: 'image/*', was: application/json" in str(
+        exc.value)

     dataset.create_data_row(row_data=image_url, media_type="IMAGE")
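
Similarly, a minimal sketch of the duplicate-global-key outcome the updated data row tests rely on; dataset and sample_image stand in for the pytest fixtures of the same names, so treat this as an illustration of the asserted behaviour rather than code from the commit:

import uuid

from labelbox import DataRow

global_key = str(uuid.uuid4())
task = dataset.create_data_rows([
    {DataRow.row_data: sample_image, DataRow.global_key: global_key},
    {DataRow.row_data: sample_image, DataRow.global_key: global_key},
])
task.wait_till_done()

# The first row imports and the duplicate is reported per row, while the task
# itself still completes, so callers inspect failed_data_rows instead of
# treating the whole task as FAILED.
assert task.status == "COMPLETE"
assert len(task.created_data_rows) == 1
assert len(task.failed_data_rows) == 1
assert task.failed_data_rows[0]['message'] == f"Duplicate global key: '{global_key}'"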

tests/integration/test_task.py

Lines changed: 6 additions & 17 deletions
@@ -8,7 +8,7 @@
 TEXT_SCHEMA_ID = "cko8s9r5v0001h2dk9elqdidh"


-def test_task_errors(dataset, image_url, snapshot, is_adv_enabled):
+def test_task_errors(dataset, image_url, snapshot):
     client = dataset.client
     task = dataset.create_data_rows([
         {

@@ -25,22 +25,11 @@ def test_task_errors(dataset, image_url, snapshot, is_adv_enabled):

     assert task in client.get_user().created_tasks()
     task.wait_till_done()
-    if is_adv_enabled:
-        assert len(task.failed_data_rows) == 1
-        assert "A schemaId can only be specified once per DataRow : [cko8s9r5v0001h2dk9elqdidh]" in task.failed_data_rows[
-            0]['message']
-        assert len(
-            task.failed_data_rows[0]['failedDataRows'][0]['metadata']) == 2
-    else:
-        snapshot.snapshot_dir = INTEGRATION_SNAPSHOT_DIRECTORY
-        # RowData is dynamic, so we need to remove it from the snapshot
-        task.failed_data_rows[0]['failedDataRows'][0]['rowData'] = ''
-        snapshot.assert_match(
-            json.dumps(task.failed_data_rows),
-            'test_task.test_task_errors.failed_data_rows.json')
-        assert task.errors is not None
-        snapshot.assert_match(json.dumps(task.errors),
-                              'test_task.test_task_errors.errors.json')
+
+    assert len(task.failed_data_rows) == 1
+    assert "A schemaId can only be specified once per DataRow : [cko8s9r5v0001h2dk9elqdidh]" in task.failed_data_rows[
+        0]['message']
+    assert len(task.failed_data_rows[0]['failedDataRows'][0]['metadata']) == 2


 def test_task_success_json(dataset, image_url, snapshot):
