@@ -787,6 +787,42 @@ def test_data_row_bulk_creation_with_same_global_keys(dataset, sample_image):
     assert list(dataset.data_rows())[0].global_key == global_key_1


+def test_data_row_delete_and_create_with_same_global_key(
+        client, dataset, sample_image):
+    global_key_1 = str(uuid.uuid4())
+    data_row_payload = {
+        DataRow.row_data: sample_image,
+        DataRow.global_key: global_key_1
+    }
+
+    # should successfully insert a new data row
+    task = dataset.create_data_rows([data_row_payload])
+    task.wait_till_done()
+
+    assert task.status == "COMPLETE"
+    assert task.result[0]['global_key'] == global_key_1
+
+    new_data_row_id = task.result[0]['id']
+
+    # the same payload should fail due to the duplicate global key
+    task = dataset.create_data_rows([data_row_payload])
+    task.wait_till_done()
+
+    assert task.status == "FAILED"
+    assert len(task.failed_data_rows) > 0
+    assert task.errors.startswith("Duplicate global keys found")
+
+    # delete the data row that owns the global key
+    client.get_data_row(new_data_row_id).delete()
+
+    # inserting with the same global key should now succeed
+    task = dataset.create_data_rows([data_row_payload])
+    task.wait_till_done()
+
+    assert task.status == "COMPLETE"
+    assert task.result[0]['global_key'] == global_key_1
+
+
 def test_data_row_bulk_creation_sync_with_unique_global_keys(
         dataset, sample_image):
     global_key_1 = str(uuid.uuid4())