Commit 6489884

Author: Dmitriy Apollonin
add tests for recreating the same global key

1 parent 01a151f

1 file changed: tests/integration/test_data_rows.py (+35 −0)

@@ -786,6 +786,41 @@ def test_data_row_bulk_creation_with_same_global_keys(dataset, sample_image):
     assert len(list(dataset.data_rows())) == 1
     assert list(dataset.data_rows())[0].global_key == global_key_1
 
+def test_data_row_delete_and_create_with_same_global_key(client, dataset, sample_image):
+    global_key_1 = str(uuid.uuid4())
+    data_row_payload = {
+        DataRow.row_data: sample_image,
+        DataRow.global_key: global_key_1
+    }
+
+    # should successfully insert a new data row
+    task = dataset.create_data_rows([data_row_payload])
+    task.wait_till_done()
+
+    assert task.status == "COMPLETE"
+    assert task.result[0]['global_key'] == global_key_1
+
+    new_data_row_id = task.result[0]['id']
+
+    # the same payload should fail because the global key is already taken
+    task = dataset.create_data_rows([data_row_payload])
+    task.wait_till_done()
+
+    assert task.status == "FAILED"
+    assert len(task.failed_data_rows) > 0
+    assert task.errors.startswith("Duplicate global keys found")
+
+    # delete the data row that currently owns the global key
+    client.get_data_row(new_data_row_id).delete()
+
+    # inserting the same payload should now succeed again
+    task = dataset.create_data_rows([data_row_payload])
+    task.wait_till_done()
+
+    assert task.status == "COMPLETE"
+    assert task.result[0]['global_key'] == global_key_1
+
+
 
 def test_data_row_bulk_creation_sync_with_unique_global_keys(
         dataset, sample_image):
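
For context, the delete-then-recreate flow this test exercises can be sketched outside the test harness. The snippet below is a minimal sketch using the Labelbox Python SDK calls that appear in the diff (create_data_rows, wait_till_done, get_data_row, delete); the API key, dataset ID, and image URL are placeholder assumptions, not values from the commit.

# Minimal sketch of the delete-then-recreate flow exercised by the new test.
# Assumes the Labelbox Python SDK; the credentials, dataset ID, and asset
# URL below are placeholders.
import uuid

from labelbox import Client, DataRow

client = Client(api_key="<LABELBOX_API_KEY>")  # placeholder credentials
dataset = client.get_dataset("<DATASET_ID>")   # placeholder dataset ID

global_key = str(uuid.uuid4())
payload = {
    DataRow.row_data: "https://example.com/image.jpg",  # placeholder asset
    DataRow.global_key: global_key,
}

# The first bulk creation claims the global key.
task = dataset.create_data_rows([payload])
task.wait_till_done()
assert task.status == "COMPLETE"
data_row_id = task.result[0]["id"]

# A second creation with the same key would fail, so delete the
# original data row first...
client.get_data_row(data_row_id).delete()

# ...after which the same global key can be reused.
task = dataset.create_data_rows([payload])
task.wait_till_done()
assert task.status == "COMPLETE"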
