11from datetime import datetime , timezone , timedelta
22
33import pytest
4+ import uuid
5+ from typing import Tuple
46
57from labelbox .schema .media_type import MediaType
8+ from labelbox import Project , Dataset
9+ from labelbox .schema .data_row import DataRow
10+ from labelbox .schema .label import Label
611
712IMAGE_URL = "https://storage.googleapis.com/lb-artifacts-testing-public/sdk_integration_test/potato.jpeg"
813
@@ -86,17 +91,25 @@ def test_project_export_v2_date_filters(client, export_v2_test_helpers,
8691
8792 filters = {
8893 "last_activity_at" : ["2000-01-01 00:00:00" , "2050-01-01 00:00:00" ],
89- "label_created_at" : ["2000-01-01 00:00:00" , "2050-01-01 00:00:00" ]
94+ "label_created_at" : ["2000-01-01 00:00:00" , "2050-01-01 00:00:00" ],
95+ "task_queue_status" : "InReview"
9096 }
9197
9298 # TODO: Right now we don't have a way to test this
9399 include_performance_details = True
94100 params = {
95- "include_performance_details " : include_performance_details ,
101+ "performance_details " : include_performance_details ,
96102 "include_labels" : True ,
103+ "project_details" : True ,
97104 "media_type_override" : MediaType .Image
98105 }
99106
107+ task_queues = project .task_queues ()
108+
109+ review_queue = next (
110+ tq for tq in task_queues if tq .queue_type == "MANUAL_REVIEW_QUEUE" )
111+ project .move_data_rows_to_task_queue ([data_row .uid ], review_queue .uid )
112+
100113 task_results = export_v2_test_helpers .run_project_export_v2_task (
101114 project , task_name = task_name , filters = filters , params = params )
102115
@@ -105,6 +118,7 @@ def test_project_export_v2_date_filters(client, export_v2_test_helpers,
105118 task_project_label_ids_set = set (
106119 map (lambda prediction : prediction ['id' ], task_project ['labels' ]))
107120 assert label_id in task_project_label_ids_set
121+ assert task_project ['project_details' ]['workflow_status' ] == 'IN_REVIEW'
108122
109123 # TODO: Add back in when we have a way to test this
110124 # if include_performance_details:
@@ -156,7 +170,7 @@ def test_project_export_v2_with_iso_date_filters(client, export_v2_test_helpers,
156170
157171
158172@pytest .mark .parametrize ("data_rows" , [3 ], indirect = True )
159- def test_project_export_v2_datarow_filter (
173+ def test_project_export_v2_datarows_filter (
160174 export_v2_test_helpers ,
161175 configured_batch_project_with_multiple_datarows ):
162176 project , _ , data_rows = configured_batch_project_with_multiple_datarows
@@ -178,3 +192,56 @@ def test_project_export_v2_datarow_filter(
178192 # only filtered datarows should be exported
179193 assert set ([dr ['data_row' ]['id' ] for dr in task_results
180194 ]) == set (data_row_ids [:datarow_filter_size ])
195+
196+ global_keys = [dr .global_key for dr in data_rows ]
197+ filters = {
198+ "last_activity_at" : ["2000-01-01 00:00:00" , "2050-01-01 00:00:00" ],
199+ "label_created_at" : ["2000-01-01 00:00:00" , "2050-01-01 00:00:00" ],
200+ "global_keys" : global_keys [:datarow_filter_size ]
201+ }
202+ params = {"data_row_details" : True , "media_type_override" : MediaType .Image }
203+ task_results = export_v2_test_helpers .run_project_export_v2_task (
204+ project , filters = filters , params = params )
205+
206+ # only 2 datarows should be exported
207+ assert len (task_results ) == datarow_filter_size
208+ # only filtered datarows should be exported
209+ assert set ([dr ['data_row' ]['global_key' ] for dr in task_results
210+ ]) == set (global_keys [:datarow_filter_size ])
211+
212+
def test_batch_project_export_v2(
        configured_batch_project_with_label: Tuple[Project, Dataset, DataRow,
                                                   Label],
        export_v2_test_helpers, dataset: Dataset, image_url: str):
    """Export a project filtered by ``batch_ids`` and verify the result.

    A second batch is created in the project but excluded by the
    ``batch_ids`` filter, proving the filter actually narrows the export
    to the first batch: the number of exported rows must equal the size
    of the filtered batch.
    """
    project, dataset, *_ = configured_batch_project_with_label

    # The fixture-created batch is the one we filter the export down to.
    batch = list(project.batches())[0]
    filters = {
        "last_activity_at": ["2000-01-01 00:00:00", "2050-01-01 00:00:00"],
        "label_created_at": ["2000-01-01 00:00:00", "2050-01-01 00:00:00"],
        "batch_ids": [batch.uid],
    }
    params = {
        "include_performance_details": True,
        "include_labels": True,
        "media_type_override": MediaType.Image
    }
    task_name = "test_batch_export_v2"
    task = dataset.create_data_rows([
        {
            "row_data": image_url,
            "external_id": "my-image"
        },
    ] * 2)
    task.wait_till_done()
    data_rows = [dr.uid for dr in list(dataset.export_data_rows())]
    # uuid4 keeps the batch name unique across test runs.
    batch_one = f'batch one {uuid.uuid4()}'

    # This test creates two batches; only one batch should be exported.
    # Create a second batch that the batch_ids filter will exclude.
    project.create_batch(batch_one, data_rows)

    task_results = export_v2_test_helpers.run_project_export_v2_task(
        project, task_name=task_name, filters=filters, params=params)
    # Only rows from the filtered batch may appear in the export.
    assert batch.size == len(task_results)
0 commit comments