@@ -1,7 +1,6 @@
 import time
 from typing import List
 from uuid import uuid4
-
 import pytest
 
 from labelbox import Dataset, Project
@@ -92,7 +91,10 @@ def test_create_batch_async(project: Project,
 
 def test_create_batch_with_consensus_settings(project: Project,
                                               small_dataset: Dataset):
-    data_rows = [dr.uid for dr in list(small_dataset.export_data_rows())]
+    export_task = small_dataset.export()
+    export_task.wait_till_done()
+    stream = export_task.get_buffered_stream()
+    data_rows = [dr.json["data_row"]["id"] for dr in stream]
     consensus_settings = {"coverage_percentage": 0.1, "number_of_labels": 3}
     batch = project.create_batch("batch with consensus settings",
                                  data_rows,
@@ -105,31 +107,44 @@ def test_create_batch_with_consensus_settings(project: Project,
 
 def test_create_batch_with_data_row_class(project: Project,
                                           small_dataset: Dataset):
-    data_rows = list(small_dataset.export_data_rows())
+    export_task = small_dataset.export()
+    export_task.wait_till_done()
+    stream = export_task.get_buffered_stream()
+    data_rows = [dr.json["data_row"]["id"] for dr in stream]
     batch = project.create_batch("test-batch-data-rows", data_rows, 3)
     assert batch.name == "test-batch-data-rows"
     assert batch.size == len(data_rows)
 
 
 def test_archive_batch(project: Project, small_dataset: Dataset):
-    data_rows = [dr.uid for dr in list(small_dataset.export_data_rows())]
+    export_task = small_dataset.export()
+    export_task.wait_till_done()
+    stream = export_task.get_buffered_stream()
+    data_rows = [dr.json["data_row"]["id"] for dr in stream]
+
     batch = project.create_batch("batch to archive", data_rows)
     batch.remove_queued_data_rows()
-    exported_data_rows = list(batch.export_data_rows())
-
-    assert len(exported_data_rows) == 0
+    overview = project.get_overview()
+
+    assert overview.to_label == 0
 
 
 def test_delete(project: Project, small_dataset: Dataset):
-    data_rows = [dr.uid for dr in list(small_dataset.export_data_rows())]
+    export_task = small_dataset.export()
+    export_task.wait_till_done()
+    stream = export_task.get_buffered_stream()
+    data_rows = [dr.json["data_row"]["id"] for dr in stream]
     batch = project.create_batch("batch to delete", data_rows)
     batch.delete()
 
     assert len(list(project.batches())) == 0
 
 
 def test_batch_project(project: Project, small_dataset: Dataset):
-    data_rows = [dr.uid for dr in list(small_dataset.export_data_rows())]
+    export_task = small_dataset.export()
+    export_task.wait_till_done()
+    stream = export_task.get_buffered_stream()
+    data_rows = [dr.json["data_row"]["id"] for dr in stream]
     batch = project.create_batch("batch to test project relationship",
                                  data_rows)
 
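
In test_archive_batch above, the old assertion re-exported the batch and expected zero rows; the new code asks the project for its labeling-queue overview instead, presumably because batch-level export belongs to the legacy path this diff is retiring. A sketch of that check in isolation (get_overview() and the to_label field come from the hunk; the helper name is made up):

from labelbox import Project


def assert_nothing_left_to_label(project: Project) -> None:
    """Sketch: after a batch's queued data rows are removed, the project's
    overview should report an empty labeling queue."""
    overview = project.get_overview()
    assert overview.to_label == 0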
@@ -186,6 +201,7 @@ def test_batch_creation_with_processing_timeout(
     project._wait_processing_max_seconds = stashed_wait_timeout
 
 
+@pytest.mark.export_v1("export_v1 test remove later")
 def test_export_data_rows(project: Project, dataset: Dataset, image_url: str,
                           external_id: str):
     n_data_rows = 2
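
The new @pytest.mark.export_v1("export_v1 test remove later") mark tags tests that still depend on the legacy export_data_rows API so they can be filtered out or removed later. A minimal sketch of registering such a mark, assuming a conftest.py-based setup; the registration hook is standard pytest, but its presence in this repo is an assumption:

# conftest.py (sketch, not part of this diff)
def pytest_configure(config):
    # Register the mark so pytest does not warn about an unknown marker.
    config.addinivalue_line(
        "markers",
        "export_v1: tests that still rely on the legacy export_data_rows API")

Runs that want to skip the legacy tests can then deselect them with pytest -m "not export_v1".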
@@ -255,18 +271,34 @@ def test_list_project_batches_with_no_batches(project: Project):
     reason="Test cannot be used effectively with MAL/LabelImport. \
     Fix/Unskip after resolving deletion with MAL/LabelImport")
 def test_delete_labels(project, small_dataset):
-    data_rows = [dr.uid for dr in list(small_dataset.export_data_rows())]
+    export_task = small_dataset.export()
+    export_task.wait_till_done()
+    stream = export_task.get_buffered_stream()
+    data_rows = [dr.json["data_row"]["id"] for dr in stream]
     batch = project.create_batch("batch to delete labels", data_rows)
 
 
 @pytest.mark.skip(
     reason="Test cannot be used effectively with MAL/LabelImport. \
     Fix/Unskip after resolving deletion with MAL/LabelImport")
 def test_delete_labels_with_templates(project: Project, small_dataset: Dataset):
-    data_rows = [dr.uid for dr in list(small_dataset.export_data_rows())]
+    export_task = small_dataset.export()
+    export_task.wait_till_done()
+    stream = export_task.get_buffered_stream()
+    data_rows = [dr.json["data_row"]["id"] for dr in stream]
     batch = project.create_batch("batch to delete labels w templates",
                                  data_rows)
-    exported_data_rows = list(batch.export_data_rows())
+
+    export_task = project.export(filters={"batch_ids": [batch.uid]})
+    export_task.wait_till_done()
+    stream = export_task.get_buffered_stream()
+    exported_data_rows = [dr.json["data_row"]["id"] for dr in stream]
+
     res = batch.delete_labels(labels_as_template=True)
-    exported_data_rows = list(batch.export_data_rows())
+
+    export_task = project.export(filters={"batch_ids": [batch.uid]})
+    export_task.wait_till_done()
+    stream = export_task.get_buffered_stream()
+    exported_data_rows = [dr.json["data_row"]["id"] for dr in stream]
+
     assert len(exported_data_rows) == 5
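
The last test runs the same batch-scoped export twice, before and after delete_labels. As a sketch, that repetition could be factored into a helper; the filters={"batch_ids": [...]} argument and the JSON path are taken from the added lines, while the helper name is illustrative:

from typing import List

from labelbox import Project


def export_batch_data_row_ids(project: Project, batch_uid: str) -> List[str]:
    """Sketch: export only the rows queued through one batch and return their ids."""
    export_task = project.export(filters={"batch_ids": [batch_uid]})
    export_task.wait_till_done()
    stream = export_task.get_buffered_stream()
    return [row.json["data_row"]["id"] for row in stream]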