Skip to content

Commit 64947b1

Browse files
author
Kevin Kim
committed
Reduce wait time for DR processing in tests
1 parent ee9311c commit 64947b1

File tree

5 files changed

+23
-10
lines changed

5 files changed

+23
-10
lines changed

.github/workflows/python-package.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -89,4 +89,4 @@ jobs:
8989

9090
DA_GCP_LABELBOX_API_KEY: ${{ secrets[matrix.da-test-key] }}
9191
run: |
92-
tox -e py -- -n 10 -svv --reruns 5 --reruns-delay 10
92+
tox -e py -- -n 10 -svv --reruns 3 --reruns-delay 3

.pre-commit-config.yaml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
repos:
22
- repo: https://github.com/google/yapf
3-
rev: v0.44.0
3+
rev: v0.40.1
44
hooks:
55
- id: yapf
66
name: "yapf"

tests/integration/annotation_import/conftest.py

Lines changed: 11 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,9 @@
1111
from labelbox.schema.annotation_import import LabelImport, AnnotationImportState
1212
from labelbox.schema.queue_mode import QueueMode
1313

14+
DATA_ROW_PROCESSING_WAIT_TIMEOUT_SECONDS = 30
15+
DATA_ROW_PROCESSING_WAIT_SLEEP_INTERNAL_SECONDS = 5
16+
1417

1518
@pytest.fixture()
1619
def audio_data_row(rand_gen):
@@ -486,7 +489,10 @@ def configured_project(client, ontology, rand_gen, image_url):
486489

487490
for _ in range(len(ontology['tools']) + len(ontology['classifications'])):
488491
data_row_ids.append(dataset.create_data_row(row_data=image_url).uid)
489-
project._wait_until_data_rows_are_processed(data_row_ids=data_row_ids)
492+
project._wait_until_data_rows_are_processed(
493+
data_row_ids=data_row_ids,
494+
wait_processing_max_seconds=DATA_ROW_PROCESSING_WAIT_TIMEOUT_SECONDS,
495+
sleep_interval=DATA_ROW_PROCESSING_WAIT_SLEEP_INTERNAL_SECONDS)
490496
project.datasets.connect(dataset)
491497
project.data_row_ids = data_row_ids
492498
yield project
@@ -505,7 +511,10 @@ def configured_project_pdf(client, ontology, rand_gen, pdf_url):
505511
project.setup(editor, ontology)
506512
data_row_ids = []
507513
data_row_ids.append(dataset.create_data_row(pdf_url).uid)
508-
project._wait_until_data_rows_are_processed(data_row_ids=data_row_ids)
514+
project._wait_until_data_rows_are_processed(
515+
data_row_ids=data_row_ids,
516+
wait_processing_max_seconds=DATA_ROW_PROCESSING_WAIT_TIMEOUT_SECONDS,
517+
sleep_interval=DATA_ROW_PROCESSING_WAIT_SLEEP_INTERNAL_SECONDS)
509518
project.datasets.connect(dataset)
510519
project.data_row_ids = data_row_ids
511520
yield project

tests/integration/conftest.py

Lines changed: 10 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,8 @@
2222
from labelbox.schema.user import User
2323

2424
IMG_URL = "https://picsum.photos/200/300.jpg"
25+
DATA_ROW_PROCESSING_WAIT_TIMEOUT_SECONDS = 30
26+
DATA_ROW_PROCESSING_WAIT_SLEEP_INTERNAL_SECONDS = 5
2527

2628

2729
class Environ(Enum):
@@ -398,7 +400,10 @@ def configured_batch_project_with_label(client, rand_gen, image_url,
398400
One label is already created and yielded when using fixture
399401
"""
400402
data_rows = [dr.uid for dr in list(dataset.data_rows())]
401-
batch_project._wait_until_data_rows_are_processed(data_row_ids=data_rows)
403+
batch_project._wait_until_data_rows_are_processed(
404+
data_row_ids=data_rows,
405+
wait_processing_max_seconds=DATA_ROW_PROCESSING_WAIT_TIMEOUT_SECONDS,
406+
sleep_interval=DATA_ROW_PROCESSING_WAIT_SLEEP_INTERNAL_SECONDS)
402407
batch_project.create_batch("test-batch", data_rows)
403408

404409
ontology = _setup_ontology(batch_project)
@@ -421,7 +426,10 @@ def configured_batch_project_with_multiple_datarows(batch_project, dataset,
421426
"""
422427
global_keys = [dr.global_key for dr in data_rows]
423428

424-
batch_project._wait_until_data_rows_are_processed(global_keys=global_keys)
429+
batch_project._wait_until_data_rows_are_processed(
430+
global_keys=global_keys,
431+
wait_processing_max_seconds=DATA_ROW_PROCESSING_WAIT_TIMEOUT_SECONDS,
432+
sleep_interval=DATA_ROW_PROCESSING_WAIT_SLEEP_INTERNAL_SECONDS)
425433
batch_name = f'batch {uuid.uuid4()}'
426434
batch_project.create_batch(batch_name, global_keys=global_keys)
427435

tests/integration/test_batch.py

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -57,8 +57,6 @@ def test_create_batch(batch_project: Project, big_dataset: Dataset):
5757

5858
def test_create_batch_async(batch_project: Project, big_dataset: Dataset):
5959
data_rows = [dr.uid for dr in list(big_dataset.export_data_rows())]
60-
batch_project._wait_until_data_rows_are_processed(
61-
data_rows, batch_project._wait_processing_max_seconds)
6260
batch = batch_project._create_batch_async("big-batch",
6361
data_rows,
6462
priority=3)
@@ -146,8 +144,6 @@ def test_batch_creation_with_processing_timeout(batch_project: Project,
146144
"""
147145
# wait for these data rows to be processed
148146
valid_data_rows = [dr.uid for dr in list(small_dataset.data_rows())]
149-
batch_project._wait_until_data_rows_are_processed(
150-
valid_data_rows, wait_processing_max_seconds=3600, sleep_interval=5)
151147

152148
# upload data rows for this dataset and don't wait
153149
upload_invalid_data_rows_for_dataset(unique_dataset)

0 commit comments

Comments (0)