
Commit 4f8a0d0

Add tests for export v2 timestamp exports (#1065)
1 parent a597b4d


2 files changed: +42 -0 lines changed


tests/integration/annotation_import/test_data_types.py

Lines changed: 38 additions & 0 deletions
@@ -1,3 +1,4 @@
+import datetime
 import itertools
 import time
 import pytest
@@ -6,6 +7,7 @@
 import labelbox as lb
 from labelbox.data.annotation_types.data.video import VideoData
 from labelbox.schema.data_row import DataRow
+from labelbox.schema.media_type import MediaType
 import labelbox.types as lb_types
 from labelbox.data.annotation_types.data import AudioData, ConversationData, DicomData, DocumentData, HTMLData, ImageData, TextData
 from labelbox.data.serialization import NDJsonConverter
@@ -153,6 +155,18 @@ def test_import_data_types(client, configured_project,
     data_row.delete()


+def validate_iso_format(date_string: str):
+    parsed_t = datetime.datetime.fromisoformat(
+        date_string)  #this will blow up if the string is not in iso format
+    assert parsed_t.hour is not None
+    assert parsed_t.minute is not None
+    assert parsed_t.second is not None
+
+
+def to_pascal_case(name: str) -> str:
+    return "".join([word.capitalize() for word in name.split("_")])
+
+
 @pytest.mark.parametrize('data_type_class', [
     AudioData, HTMLData, ImageData, TextData, VideoData, ConversationData,
     DocumentData, DicomData
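
For orientation, a minimal usage sketch of the two helpers introduced in this hunk; the input strings are invented examples, not values taken from an actual export.

# Hypothetical calls against the helpers defined above; the inputs are made up.
validate_iso_format("2023-06-01T12:34:56+00:00")  # parses as ISO 8601, all asserts pass
validate_iso_format("not-a-timestamp")            # would raise ValueError from fromisoformat
to_pascal_case("data_row")                        # returns "DataRow"
to_pascal_case("conversation")                    # returns "Conversation"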
@@ -166,6 +180,12 @@ def test_import_data_types_v2(client, configured_project,
     project_id = configured_project.uid

     data_type_string = data_type_class.__name__[:-4].lower()
+
+    media_type = to_pascal_case(data_type_string)
+    if media_type == 'Conversation':
+        media_type = 'Conversational'
+    configured_project.update(media_type=MediaType[media_type])
+
     data_row_ndjson = data_row_json_by_data_type[data_type_string]
     dataset = next(configured_project.datasets())
     data_row = dataset.create_data_row(data_row_ndjson)
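
To make the new media-type mapping concrete, a short sketch of how a few of the parametrized classes flow through it; the MediaType member names (Image, Html, Conversational) are assumptions about the enum, not verified here.

# Sketch: the same mapping as above, applied to a few of the parametrized classes.
# MediaType member names are assumed; see labelbox.schema.media_type for the real enum.
for cls in (ImageData, HTMLData, ConversationData):
    data_type_string = cls.__name__[:-4].lower()   # "image", "html", "conversation"
    media_type = to_pascal_case(data_type_string)  # "Image", "Html", "Conversation"
    if media_type == 'Conversation':
        media_type = 'Conversational'              # assumed name of the conversational member
    print(cls.__name__, '->', media_type)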
@@ -187,10 +207,28 @@ def test_import_data_types_v2(client, configured_project,
     assert label_import.state == AnnotationImportState.FINISHED
     assert len(label_import.errors) == 0

+    for label in configured_project.labels():  #trigger review creation
+        label.create_review(score=1.0)
+
+    #TODO need to migrate project to the new BATCH mode and change this code
+    # to be similar to tests/integration/test_task_queue.py
+
     result = export_v2_test_helpers.run_project_export_v2_task(
         configured_project)
     exported_data = result[0]

+    # timestamp fields are in iso format
+    validate_iso_format(exported_data['data_row']['details']['created_at'])
+    validate_iso_format(exported_data['data_row']['details']['updated_at'])
+    validate_iso_format(exported_data['projects'][project_id]['labels'][0]
+                        ['label_details']['created_at'])
+    validate_iso_format(exported_data['projects'][project_id]['labels'][0]
+                        ['label_details']['updated_at'])
+    validate_iso_format(exported_data['projects'][project_id]['labels'][0]
+                        ['label_details']['reviews'][0]['reviewed_at'])
+    # to be added once we have switched to the new BATCH mode
+    # validate_iso_format(exported_data['projects'][project_id]['project_details']['workflow_history'][0]['created_at'])
+
     assert (exported_data['data_row']['id'] == data_row.uid)
     exported_project = exported_data['projects'][project_id]
     exported_project_labels = exported_project['labels'][0]
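
For reference, a sketch of the export v2 row shape these assertions traverse; the nesting is inferred from the dictionary accesses in the test, and the timestamp values are illustrative.

# Approximate shape of one export v2 row, as implied by the keys read above.
# Values are invented; only the nesting mirrors the test's accesses.
exported_data = {
    'data_row': {
        'id': '<data_row_uid>',
        'details': {
            'created_at': '2023-06-01T12:34:56+00:00',
            'updated_at': '2023-06-01T12:35:10+00:00',
        },
    },
    'projects': {
        '<project_id>': {
            'labels': [{
                'label_details': {
                    'created_at': '2023-06-01T12:40:00+00:00',
                    'updated_at': '2023-06-01T12:41:00+00:00',
                    'reviews': [{'reviewed_at': '2023-06-01T12:45:00+00:00'}],
                },
            }],
        },
    },
}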

tests/integration/conftest.py

Lines changed: 4 additions & 0 deletions
@@ -534,6 +534,8 @@ def configured_project_with_complex_ontology(client, rand_gen, image_url):
     project.delete()


+# NOTE this is nice heuristics, also there is this logic _wait_until_data_rows_are_processed in Project
+# in case we still have flakiness in the future, we can use it
 @pytest.fixture
 def wait_for_data_row_processing():
     """
@@ -642,7 +644,9 @@ def run_project_export_v2_task(cls,
                                    params={}):
         task = None
         params = params if params else {
+            "project_details": True,
             "performance_details": False,
+            "data_row_details": True,
             "label_details": True
         }
         while (num_retries > 0):
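
The two new flags line up with the export sections the test reads back; the correspondence sketched below is inferred from the keys asserted in test_data_types.py, not from SDK documentation.

# Inferred mapping between export v2 params and output sections (an assumption, not an SDK reference).
params = {
    "data_row_details": True,      # -> exported_data['data_row']['details'] (created_at / updated_at)
    "project_details": True,       # -> exported_data['projects'][pid]['project_details'] (e.g. workflow_history)
    "label_details": True,         # -> exported_data['projects'][pid]['labels'][n]['label_details']
    "performance_details": False,  # performance metrics are not needed for the timestamp assertions
}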
