Commit 4a72cfd

Merge pull request #864 from Labelbox/AL-4817-create-test-case-around-duplicate-global-keys-issue
add tests for recreating the same global key
2 parents: 01a151f + 39f1838

File tree

1 file changed: +36 −0

tests/integration/test_data_rows.py

Lines changed: 36 additions & 0 deletions
@@ -787,6 +787,42 @@ def test_data_row_bulk_creation_with_same_global_keys(dataset, sample_image):
     assert list(dataset.data_rows())[0].global_key == global_key_1
 
 
+def test_data_row_delete_and_create_with_same_global_key(
+        client, dataset, sample_image):
+    global_key_1 = str(uuid.uuid4())
+    data_row_payload = {
+        DataRow.row_data: sample_image,
+        DataRow.global_key: global_key_1
+    }
+
+    # should successfully insert new datarow
+    task = dataset.create_data_rows([data_row_payload])
+    task.wait_till_done()
+
+    assert task.status == "COMPLETE"
+    assert task.result[0]['global_key'] == global_key_1
+
+    new_data_row_id = task.result[0]['id']
+
+    # same payload should fail due to duplicated global key
+    task = dataset.create_data_rows([data_row_payload])
+    task.wait_till_done()
+
+    assert task.status == "FAILED"
+    assert len(task.failed_data_rows) > 0
+    assert task.errors.startswith("Duplicate global keys found")
+
+    # delete datarow
+    client.get_data_row(new_data_row_id).delete()
+
+    # should successfully insert new datarow now
+    task = dataset.create_data_rows([data_row_payload])
+    task.wait_till_done()
+
+    assert task.status == "COMPLETE"
+    assert task.result[0]['global_key'] == global_key_1
+
+
 def test_data_row_bulk_creation_sync_with_unique_global_keys(
         dataset, sample_image):
     global_key_1 = str(uuid.uuid4())

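For reference, a minimal standalone sketch of the scenario the new test exercises, using only SDK calls that appear in the diff (create_data_rows, wait_till_done, get_data_row(...).delete()). The client and dataset setup, the dataset name, and the image URL below are illustrative assumptions, not part of the commit:

# Minimal sketch (not part of the commit): delete-and-recreate with the same
# global key, outside the pytest fixtures. Assumes LABELBOX_API_KEY is set in
# the environment; dataset name and image URL are placeholders.
import uuid

from labelbox import Client, DataRow

client = Client()  # picks up LABELBOX_API_KEY from the environment
dataset = client.create_dataset(name="duplicate-global-key-demo")

global_key = str(uuid.uuid4())
payload = {
    DataRow.row_data: "https://example.com/sample.jpg",
    DataRow.global_key: global_key,
}

# First upload claims the global key.
task = dataset.create_data_rows([payload])
task.wait_till_done()
assert task.status == "COMPLETE"
data_row_id = task.result[0]['id']

# Re-uploading the same global key fails while the original row exists.
task = dataset.create_data_rows([payload])
task.wait_till_done()
assert task.status == "FAILED"

# Deleting the original row frees the key, so the same payload now succeeds.
client.get_data_row(data_row_id).delete()
task = dataset.create_data_rows([payload])
task.wait_till_done()
assert task.status == "COMPLETE"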