@@ -97,50 +97,42 @@ def test_batch_project(batch_project: Project, small_dataset: Dataset):
 
 
 def test_batch_creation_for_data_rows_with_issues(
-    batch_project: Project,
-    small_dataset: Dataset,
-    dataset_with_invalid_data_rows: Dataset
-):
+        batch_project: Project, small_dataset: Dataset,
+        dataset_with_invalid_data_rows: Dataset):
     """
     Create a batch containing both valid and invalid data rows
     """
     valid_data_rows = [dr.uid for dr in list(small_dataset.data_rows())]
-    invalid_data_rows = [dr.uid for dr in list(
-        dataset_with_invalid_data_rows.data_rows())]
+    invalid_data_rows = [
+        dr.uid for dr in list(dataset_with_invalid_data_rows.data_rows())
+    ]
     data_rows_to_add = valid_data_rows + invalid_data_rows
 
     assert len(data_rows_to_add) == 5
-    batch = batch_project.create_batch(
-        "batch to test failed data rows",
-        data_rows_to_add
-    )
+    batch = batch_project.create_batch("batch to test failed data rows",
+                                       data_rows_to_add)
 
     assert len(batch.failed_data_row_ids) == 2
 
     failed_data_row_ids_set = set(batch.failed_data_row_ids)
     invalid_data_rows_set = set(invalid_data_rows)
-    assert len(failed_data_row_ids_set.intersection(
-        invalid_data_rows_set)) == 2
+    assert len(failed_data_row_ids_set.intersection(invalid_data_rows_set)) == 2
 
 
-def test_batch_creation_with_processing_timeout(
-    batch_project: Project,
-    small_dataset: Dataset,
-    unique_dataset: Dataset
-):
+def test_batch_creation_with_processing_timeout(batch_project: Project,
+                                                small_dataset: Dataset,
+                                                unique_dataset: Dataset):
     """
     Create a batch with zero wait time; this means the waiting logic will throw an exception immediately
     """
     # wait for these data rows to be processed
     valid_data_rows = [dr.uid for dr in list(small_dataset.data_rows())]
     batch_project._wait_until_data_rows_are_processed(
-        valid_data_rows, wait_processing_max_seconds=3600, sleep_interval=5
-    )
+        valid_data_rows, wait_processing_max_seconds=3600, sleep_interval=5)
 
     # upload data rows for this dataset and don't wait
     upload_invalid_data_rows_for_dataset(unique_dataset)
-    unprocessed_data_rows = [dr.uid for dr in list(
-        unique_dataset.data_rows())]
+    unprocessed_data_rows = [dr.uid for dr in list(unique_dataset.data_rows())]
 
     data_row_ids = valid_data_rows + unprocessed_data_rows
 
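This hunk is a pure reformat (argument rewrapping); the behaviour under test is unchanged. The first test exercises the SDK's partial-failure contract: create_batch does not raise when the backend rejects rows, it returns a Batch whose failed_data_row_ids lists them. A minimal sketch of that calling pattern outside the test fixtures, assuming the SDK's usual Client/get_project entry points (the key and ids below are placeholders):

from labelbox import Client

client = Client(api_key="<API_KEY>")          # placeholder credentials
project = client.get_project("<PROJECT_ID>")  # placeholder project id

# Mix of ids; some may reference rows the backend cannot attach.
batch = project.create_batch("example batch", ["<ROW_ID_1>", "<ROW_ID_2>"])

# Rejected rows are reported on the batch rather than raised.
print(batch.failed_data_row_ids)
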
@@ -149,10 +141,8 @@ def test_batch_creation_with_processing_timeout(
     # emulate the situation where there are still some data rows being
     # processed but wait timeout exceeded
     batch_project._wait_processing_max_seconds = 0
-    batch_project.create_batch(
-        "batch to test failed data rows",
-        data_row_ids
-    )
+    batch_project.create_batch("batch to test failed data rows",
+                               data_row_ids)
     batch_project._wait_processing_max_seconds = stashed_wait_timeout
 
 
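The timeout test forces the failure branch by zeroing _wait_processing_max_seconds before the create_batch call and restoring the stashed value afterwards, so the waiting logic gives up immediately while some rows are still processing. The SDK internals are not part of this diff; purely as an illustration of the poll-until-deadline pattern being forced here, a generic sketch (every name in it is hypothetical, not the SDK's):

import time

def wait_until_processed(is_processed, wait_processing_max_seconds,
                         sleep_interval):
    # Generic poll-with-deadline loop; illustrative only, not the SDK source.
    # `is_processed` stands in for a backend check that all rows are ready.
    deadline = time.monotonic() + wait_processing_max_seconds
    while not is_processed():
        if time.monotonic() >= deadline:
            # A zero-second budget, as in the test above, trips this at once.
            raise TimeoutError("data rows still processing after deadline")
        time.sleep(sleep_interval)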