Commit 72a801f

AL-4081: Reformat with yapf

1 parent 95f7964

File tree

5 files changed: +43, -37 lines
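
The changes below are mechanical rewrapping produced by yapf; no behavior changes. As a rough sketch of how such a reformat can be driven from Python, assuming yapf's FormatCode API and its default 'pep8' base style (the actual CLI invocation and style configuration used for this commit are not shown here):

# Sketch only: reformat a source string with yapf, roughly as this commit does.
# The commit itself was presumably produced by running the yapf CLI over the
# repository; the 'pep8' style below is an assumption, not the repo's config.
from yapf.yapflib.yapf_api import FormatCode

source = ("def f(aaaa, bbbb, cccc, dddd, eeee, ffff, gggg, hhhh, iiii, jjjj):"
          " return aaaa\n")

# In yapf releases contemporary with this commit, FormatCode returns a
# (formatted_source, changed) tuple.
formatted, changed = FormatCode(source, style_config='pep8')
print(formatted)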

labelbox/client.py

Lines changed: 2 additions & 2 deletions
@@ -767,7 +767,7 @@ def get_data_row_ids_for_external_ids(
            for row in self.execute(
                    query_str,
                    {'externalId_in': external_ids[i:i + max_ids_per_request]
-                   })['externalIdsToDataRowIds']:
+                    })['externalIdsToDataRowIds']:
                result[row['externalId']].append(row['dataRowId'])
        return result

@@ -1074,7 +1074,7 @@ def _format_failed_rows(rows: Dict[str, str],
        result_params = {
            "jobId":
                assign_global_keys_to_data_rows_job["assignGlobalKeysToDataRows"
-                ]["jobId"]
+                                                    ]["jobId"]
        }

        # Poll job status until finished, then retrieve results

labelbox/schema/batch.py

Lines changed: 10 additions & 5 deletions
@@ -37,7 +37,12 @@ class Batch(DbObject):
    # Relationships
    created_by = Relationship.ToOne("User")

-    def __init__(self, client, project_id, *args, failed_data_row_ids=None, **kwargs):
+    def __init__(self,
+                 client,
+                 project_id,
+                 *args,
+                 failed_data_row_ids=None,
+                 **kwargs):
        super().__init__(client, *args, **kwargs)
        self.project_id = project_id
        self._failed_data_row_ids = failed_data_row_ids

@@ -77,7 +82,7 @@ def remove_queued_data_rows(self) -> None:
                batch_id_param), {
                    project_id_param: self.project_id,
                    batch_id_param: self.uid
-            },
+                },
            experimental=True)

    def export_data_rows(self,

@@ -146,8 +151,8 @@ def delete(self) -> None:
                batch_id_param), {
                    project_id_param: self.project_id,
                    batch_id_param: self.uid
-            },
-            experimental=True)
+                },
+                experimental=True)

    def delete_labels(self, set_labels_as_template=False) -> None:
        """ Deletes labels that were created for data rows in the batch.

@@ -172,7 +177,7 @@ def delete_labels(self, set_labels_as_template=False) -> None:
                    type_param:
                        "RequeueDataWithLabelAsTemplate"
                        if set_labels_as_template else "RequeueData"
-            },
+                },
            experimental=True)
        return res

labelbox/schema/project.py

Lines changed: 15 additions & 5 deletions
@@ -9,8 +9,10 @@

 import ndjson
 import requests
+
 from labelbox import utils
-from labelbox.exceptions import InvalidQueryError, LabelboxError, ProcessingWaitTimeout
+from labelbox.exceptions import (InvalidQueryError, LabelboxError,
+                                 ProcessingWaitTimeout)
 from labelbox.orm import query
 from labelbox.orm.db_object import DbObject, Deletable, Updateable
 from labelbox.orm.model import Entity, Field, Relationship

@@ -631,7 +633,10 @@ def create_batch(self,
            experimental=True)["project"][method]
        batch = res['batch']
        batch['size'] = len(dr_ids)
-        return Entity.Batch(self.client, self.uid, batch, failed_data_row_ids=res['failedDataRowIds'])
+        return Entity.Batch(self.client,
+                            self.uid,
+                            batch,
+                            failed_data_row_ids=res['failedDataRowIds'])

    def _update_queue_mode(self, mode: "QueueMode") -> "QueueMode":
        """

@@ -984,11 +989,15 @@ def _is_url_valid(url: Union[str, Path]) -> bool:
            raise ValueError(
                f'Invalid annotations given of type: {type(annotations)}')

-    def _wait_until_data_rows_are_processed(self, data_row_ids: List[str], wait_processing_max_seconds: int, sleep_interval=30):
+    def _wait_until_data_rows_are_processed(self,
+                                            data_row_ids: List[str],
+                                            wait_processing_max_seconds: int,
+                                            sleep_interval=30):
        """ Wait until all the specified data rows are processed"""
        start_time = datetime.now()
        while True:
-            if (datetime.now() - start_time).total_seconds() >= wait_processing_max_seconds:
+            if (datetime.now() -
+                    start_time).total_seconds() >= wait_processing_max_seconds:
                raise ProcessingWaitTimeout(
                    "Maximum wait time exceeded while waiting for data rows to be processed. Try creating a batch a bit later"
                )

@@ -1013,7 +1022,8 @@ def __check_data_rows_have_been_processed(self, data_row_ids: List[str]):
        params = {}
        params[data_row_ids_param] = data_row_ids
        response = self.client.execute(query_str, params)
-        return response["queryAllDataRowsHaveBeenProcessed"]["allDataRowsHaveBeenProcessed"]
+        return response["queryAllDataRowsHaveBeenProcessed"][
+            "allDataRowsHaveBeenProcessed"]


class ProjectMember(DbObject):
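
The _wait_until_data_rows_are_processed hunk above is a rewrap of a standard poll-until-deadline loop. A standalone sketch of the same pattern, where wait_until and check_fn are illustrative names rather than part of the labelbox API:

# Sketch of the poll-until-deadline pattern that
# _wait_until_data_rows_are_processed follows; all names are illustrative.
import time
from datetime import datetime


def wait_until(check_fn, wait_max_seconds: int, sleep_interval: int = 30):
    start_time = datetime.now()
    while True:
        # Give up once the elapsed time exceeds the deadline, mirroring the
        # ProcessingWaitTimeout raised in the diff above.
        if (datetime.now() - start_time).total_seconds() >= wait_max_seconds:
            raise TimeoutError("Maximum wait time exceeded")
        if check_fn():
            return
        time.sleep(sleep_interval)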

tests/integration/conftest.py

Lines changed: 1 addition & 0 deletions
@@ -190,6 +190,7 @@ def dataset(client, rand_gen):
    yield dataset
    dataset.delete()

+
@pytest.fixture(scope='function')
def unique_dataset(client, rand_gen):
    dataset = client.create_dataset(name=rand_gen(str))

tests/integration/test_batch.py

Lines changed: 15 additions & 25 deletions
@@ -97,50 +97,42 @@ def test_batch_project(batch_project: Project, small_dataset: Dataset):


 def test_batch_creation_for_data_rows_with_issues(
-        batch_project: Project,
-        small_dataset: Dataset,
-        dataset_with_invalid_data_rows: Dataset
-):
+        batch_project: Project, small_dataset: Dataset,
+        dataset_with_invalid_data_rows: Dataset):
    """
    Create a batch containing both valid and invalid data rows
    """
    valid_data_rows = [dr.uid for dr in list(small_dataset.data_rows())]
-    invalid_data_rows = [dr.uid for dr in list(
-        dataset_with_invalid_data_rows.data_rows())]
+    invalid_data_rows = [
+        dr.uid for dr in list(dataset_with_invalid_data_rows.data_rows())
+    ]
    data_rows_to_add = valid_data_rows + invalid_data_rows

    assert len(data_rows_to_add) == 5
-    batch = batch_project.create_batch(
-        "batch to test failed data rows",
-        data_rows_to_add
-    )
+    batch = batch_project.create_batch("batch to test failed data rows",
+                                       data_rows_to_add)

    assert len(batch.failed_data_row_ids) == 2

    failed_data_row_ids_set = set(batch.failed_data_row_ids)
    invalid_data_rows_set = set(invalid_data_rows)
-    assert len(failed_data_row_ids_set.intersection(
-        invalid_data_rows_set)) == 2
+    assert len(failed_data_row_ids_set.intersection(invalid_data_rows_set)) == 2


-def test_batch_creation_with_processing_timeout(
-        batch_project: Project,
-        small_dataset: Dataset,
-        unique_dataset: Dataset
-):
+def test_batch_creation_with_processing_timeout(batch_project: Project,
+                                                small_dataset: Dataset,
+                                                unique_dataset: Dataset):
    """
    Create a batch with zero wait time, this means that the waiting logic will throw exception immediately
    """
    # wait for these data rows to be processed
    valid_data_rows = [dr.uid for dr in list(small_dataset.data_rows())]
    batch_project._wait_until_data_rows_are_processed(
-        valid_data_rows, wait_processing_max_seconds=3600, sleep_interval=5
-    )
+        valid_data_rows, wait_processing_max_seconds=3600, sleep_interval=5)

    # upload data rows for this dataset and don't wait
    upload_invalid_data_rows_for_dataset(unique_dataset)
-    unprocessed_data_rows = [dr.uid for dr in list(
-        unique_dataset.data_rows())]
+    unprocessed_data_rows = [dr.uid for dr in list(unique_dataset.data_rows())]

    data_row_ids = valid_data_rows + unprocessed_data_rows

@@ -149,10 +141,8 @@ def test_batch_creation_with_processing_timeout(
    # emulate the situation where there are still some data rows being
    # processed but wait timeout exceeded
    batch_project._wait_processing_max_seconds = 0
-    batch_project.create_batch(
-        "batch to test failed data rows",
-        data_row_ids
-    )
+    batch_project.create_batch("batch to test failed data rows",
+                               data_row_ids)
    batch_project._wait_processing_max_seconds = stashed_wait_timeout
