@@ -151,14 +151,14 @@ def test_data_row_bulk_creation(dataset, rand_gen, image_url):
 @pytest.mark.slow
 def test_data_row_large_bulk_creation(dataset, image_url):
     # Do a longer task and expect it not to be complete immediately
-    n_local = 2000
-    n_urls = 250
+    n_urls = 1000
+    n_local = 250
     with NamedTemporaryFile() as fp:
         fp.write("Test data".encode())
         fp.flush()
         task = dataset.create_data_rows([{
             DataRow.row_data: image_url
-        }] * n_local + [fp.name] * n_urls)
+        }] * n_urls + [fp.name] * n_local)
     task.wait_till_done()
     assert task.status == "COMPLETE"
     assert len(list(dataset.data_rows())) == n_local + n_urls
@@ -353,7 +353,7 @@ def test_create_data_rows_with_invalid_metadata(dataset, image_url):
         DataRow.metadata_fields: fields
     }])
     task.wait_till_done()
-    assert task.status == "COMPLETE"
+    assert task.status == "FAILED"
     assert len(task.failed_data_rows) > 0
 
 
@@ -634,9 +634,10 @@ def test_data_row_bulk_creation_with_same_global_keys(dataset, sample_image):
     }])
 
     task.wait_till_done()
-    assert task.status == "COMPLETE"
+    assert task.status == "FAILED"
     assert len(task.failed_data_rows) > 0
     assert len(list(dataset.data_rows())) == 0
+    assert task.errors == "Import job failed"
 
     task = dataset.create_data_rows([{
         DataRow.row_data: sample_image,
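All three hunks exercise the same pattern: create_data_rows returns a Task, the test blocks on wait_till_done(), and a bad payload is now expected to surface as status "FAILED" with the rejected rows in failed_data_rows and a summary string in errors. A minimal sketch of that flow outside the test suite, assuming a configured Labelbox client; the dataset name, image URL, and global key below are hypothetical placeholders, not values from this diff:

# Sketch of the bulk-import flow these tests assert against.
# Assumes LABELBOX_API_KEY is set in the environment.
from labelbox import Client, DataRow

client = Client()
dataset = client.create_dataset(name="bulk-import-demo")  # hypothetical name

# Two rows sharing a global key: the import job is expected to fail,
# mirroring test_data_row_bulk_creation_with_same_global_keys.
task = dataset.create_data_rows([
    {DataRow.row_data: "https://example.com/image.jpg",
     DataRow.global_key: "dup-key"},
    {DataRow.row_data: "https://example.com/image.jpg",
     DataRow.global_key: "dup-key"},
])
task.wait_till_done()

if task.status == "FAILED":
    print(task.errors)            # e.g. "Import job failed"
    print(task.failed_data_rows)  # payloads the backend rejected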