
Commit 95f7964

AL-4081: Addressed comments
1 parent 6041a26 commit 95f7964

2 files changed: +11 -7 lines

labelbox/schema/project.py

Lines changed: 1 addition & 1 deletion
@@ -603,7 +603,7 @@ def create_batch(self,
         query_str = """mutation %sPyApi($projectId: ID!, $batchInput: CreateBatchInput!) {
                 project(where: {id: $projectId}) {
                     %s(input: $batchInput) {
-                        batch{
+                        batch {
                             %s
                         }
                         failedDataRowIds
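
Note: the project.py change is whitespace-only, inside the GraphQL template that create_batch renders. For context, a minimal sketch of exercising this mutation through the SDK (the API key, project ID, batch name, and data row IDs below are placeholders, not values from this commit):

    from labelbox import Client

    client = Client(api_key="<api-key>")
    project = client.get_project("<project-id>")

    # create_batch sends the CreateBatchInput mutation shown above; the
    # failedDataRowIds field in the response reports rows that could not
    # be added to the batch
    batch = project.create_batch("example-batch",
                                 ["<data-row-id-1>", "<data-row-id-2>"])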

tests/integration/test_batch.py

Lines changed: 10 additions & 6 deletions
@@ -41,8 +41,8 @@ def dataset_with_invalid_data_rows(unique_dataset: Dataset):
 def upload_invalid_data_rows_for_dataset(dataset: Dataset):
     task = dataset.create_data_rows([
         {
-            "row_data": 'https://jakub-da-test-primary.s3.us-east-2.amazonaws.com/dogecoin-whitepaper.pdf',
-            "external_id": "my-pdf"
+            "row_data": 'gs://lb-test-private/mask-2.png',  # forbidden
+            "external_id": "image-without-access.jpg"
         },
     ] * 2)
     task.wait_till_done()
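
The fixture now uploads a private gs:// object so ingestion fails deterministically, rather than depending on an external S3 PDF. A sketch of inspecting the outcome directly (assuming the Task returned by create_data_rows exposes status and errors after wait_till_done(), as used elsewhere in this SDK; the test suite itself instead lets these rows surface as failedDataRowIds during batch creation):

    task = dataset.create_data_rows([{
        "row_data": 'gs://lb-test-private/mask-2.png',  # inaccessible object
        "external_id": "image-without-access.jpg"
    }] * 2)
    task.wait_till_done()
    # hypothetical check: expect a failure to be reported for the forbidden object
    print(task.status, task.errors)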
@@ -104,9 +104,9 @@ def test_batch_creation_for_data_rows_with_issues(
     """
     Create a batch containing both valid and invalid data rows
     """
-    valid_data_rows = [dr.uid for dr in list(small_dataset.export_data_rows())]
+    valid_data_rows = [dr.uid for dr in list(small_dataset.data_rows())]
     invalid_data_rows = [dr.uid for dr in list(
-        dataset_with_invalid_data_rows.export_data_rows())]
+        dataset_with_invalid_data_rows.data_rows())]
     data_rows_to_add = valid_data_rows + invalid_data_rows

     assert len(data_rows_to_add) == 5
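
The swap from export_data_rows() to data_rows() collects the uids through the dataset's paginated relationship instead of launching a server-side export task, which is faster and less flaky in integration tests. A sketch of the pattern (PaginatedCollection semantics assumed per this SDK version):

    # data_rows() returns a lazy PaginatedCollection; list(...) drains it,
    # so the uid list is built without waiting on an export job
    valid_data_rows = [dr.uid for dr in list(small_dataset.data_rows())]

    # by contrast, export_data_rows() kicks off an export task and blocks
    # until the server has produced the export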
@@ -132,24 +132,28 @@ def test_batch_creation_with_processing_timeout(
     Create a batch with zero wait time, this means that the waiting logic will throw exception immediately
     """
     # wait for these data rows to be processed
-    valid_data_rows = [dr.uid for dr in list(small_dataset.export_data_rows())]
+    valid_data_rows = [dr.uid for dr in list(small_dataset.data_rows())]
     batch_project._wait_until_data_rows_are_processed(
         valid_data_rows, wait_processing_max_seconds=3600, sleep_interval=5
     )

     # upload data rows for this dataset and don't wait
     upload_invalid_data_rows_for_dataset(unique_dataset)
     unprocessed_data_rows = [dr.uid for dr in list(
-        unique_dataset.export_data_rows())]
+        unique_dataset.data_rows())]

     data_row_ids = valid_data_rows + unprocessed_data_rows

+    stashed_wait_timeout = batch_project._wait_processing_max_seconds
     with pytest.raises(ProcessingWaitTimeout):
+        # emulate the situation where there are still some data rows being
+        # processed but wait timeout exceeded
         batch_project._wait_processing_max_seconds = 0
         batch_project.create_batch(
             "batch to test failed data rows",
             data_row_ids
         )
+    batch_project._wait_processing_max_seconds = stashed_wait_timeout


 def test_export_data_rows(batch_project: Project, dataset: Dataset):
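
The stash/restore added around _wait_processing_max_seconds keeps the zero timeout from leaking into later tests. One caveat, sketched below with the same names as the hunk above: if create_batch raised anything other than ProcessingWaitTimeout (or nothing at all), the restore line after the with block would be skipped, so a try/finally (or pytest's monkeypatch fixture) is the more defensive form:

    stashed_wait_timeout = batch_project._wait_processing_max_seconds
    try:
        batch_project._wait_processing_max_seconds = 0
        with pytest.raises(ProcessingWaitTimeout):
            batch_project.create_batch(
                "batch to test failed data rows",
                data_row_ids
            )
    finally:
        # restored even if an unexpected exception escapes create_batch
        batch_project._wait_processing_max_seconds = stashed_wait_timeout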
