
Commit 34db8af

Update tests
1 parent 6eba50a commit 34db8af

File tree

3 files changed: 41 additions & 17 deletions

labelbox/schema/export_params.py
labelbox/schema/project.py
tests/integration/test_label.py


labelbox/schema/export_params.py

Lines changed: 0 additions & 2 deletions
@@ -1,5 +1,3 @@
-import sys
-
 from typing import Optional
 if sys.version_info >= (3, 8):
     from typing import TypedDict

labelbox/schema/project.py

Lines changed: 8 additions & 12 deletions
@@ -20,7 +20,7 @@
 from labelbox.pagination import PaginatedCollection
 from labelbox.schema.consensus_settings import ConsensusSettings
 from labelbox.schema.data_row import DataRow
-from labelbox.schema.export_params import ModelRunExportParams
+from labelbox.schema.export_params import ProjectExportParams
 from labelbox.schema.media_type import MediaType
 from labelbox.schema.queue_mode import QueueMode
 from labelbox.schema.resource_tag import ResourceTag
@@ -287,7 +287,7 @@ def label_generator(self, timeout_seconds=600, **kwargs):
             return LBV1Converter.deserialize_video(json_data, self.client)
         return LBV1Converter.deserialize(json_data)

-    def export_labels_v2(
+    def export_labels(
             self,
             download=False,
             timeout_seconds=1800,
@@ -379,23 +379,22 @@ def _validate_datetime(string_date: str) -> bool:
         """
         Creates a project run export task with the given params and returns the task.

-        >>> export_task = export_labels_v2("my_export_task", filter={"media_attributes": True})
+        >>> export_task = export_v2("my_export_task", filter={"media_attributes": True})

         """

-    def export_labels_v2(self, task_name: str,
-                         params: Optional[ModelRunExportParams]) -> Task:
+    def export_v2(self, task_name: str,
+                  params: Optional[ProjectExportParams]) -> Task:
         _params = params or {}
-        mutation_name = "exportDataRows"
-        create_task_query_str = """mutation exportDataRowsPyApi($input: ExportDataRowsInput!){
+        mutation_name = "exportDataRowsInProject"
+        create_task_query_str = """mutation exportDataRowsInProjectPyApi($input: ExportDataRowsInProjectInput!){
             %s(input: $input) {taskId} }
             """ % (mutation_name)
         params = {
             "input": {
                 "taskName": task_name,
                 "filters": {
-                    "modelRunIds": [],
-                    "projectIds": [self.uid]
+                    "projectId": self.uid
                 },
                 "params": {
                     "includeAttachments":
@@ -412,9 +411,6 @@ def export_labels_v2(self, task_name: str,
                         _params.get('include_labels', False),
                     "includePerformanceDetails":
                         _params.get('include_performance_details', False),
-                    # Arguments locked based on exectuion context
-                    "includeModelRuns":
-                        False,
                 },
             }
         }
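
Taken together, the renamed Project.export_v2 method above and the integration test below suggest the following end-to-end usage pattern. This is a minimal sketch, not part of the commit: the API key and project ID are placeholders, and the param keys simply mirror the ones exercised in this diff and in the test.

import json

import requests

import labelbox as lb

# Placeholders: substitute a real API key and project ID.
client = lb.Client(api_key="<YOUR_API_KEY>")
project = client.get_project("<PROJECT_ID>")

# Start the export task; the param keys mirror those used in the test below.
task = project.export_v2(
    "my_export_task",
    params={
        "include_labels": True,
        "include_performance_details": False,
    })
task.wait_till_done()
assert task.status == "COMPLETE"

# The result URL points at newline-delimited JSON: one record per data row.
response = requests.get(task.result_url)
response.raise_for_status()
records = [json.loads(line) for line in response.text.splitlines()]
print(f"exported {len(records)} data rows")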

tests/integration/test_label.py

Lines changed: 33 additions & 3 deletions
@@ -1,3 +1,4 @@
+import json
 import time

 import pytest
@@ -43,18 +44,47 @@ def test_label_export(configured_project_with_label):
     # The new exporter doesn't work with the create_label mutation


-def test_label_export_v2(configured_project_with_label):
+def test_export_v2(configured_project_with_label):
     project, _, _, label = configured_project_with_label
     label_id = label.uid
     # Wait for exporter to retrieve latest labels
     time.sleep(10)
     task_name = "test_label_export_v2"

-    task = project.export_labels_v2(task_name, filter={"project_details": True})
+    # TODO: Right now we don't have a way to test this
+    include_performance_details = True
+    task = project.export_v2(
+        task_name,
+        params={
+            "include_performance_details": include_performance_details,
+            "include_labels": True
+        })
     assert task.name == task_name
     task.wait_till_done()
     assert task.status == "COMPLETE"
-    # TODO: Download result and check it
+
+    def download_result(result_url):
+        response = requests.get(result_url)
+        response.raise_for_status()
+        data = [json.loads(line) for line in response.text.splitlines()]
+        return data
+
+    task_results = download_result(task.result_url)
+
+    for task_result in task_results:
+        assert len(task_result['errors']) == 0
+        task_project = task_result['projects'][project.uid]
+        task_project_label_ids_set = set(
+            map(lambda prediction: prediction['id'], task_project['labels']))
+        assert label_id in task_project_label_ids_set
+
+        # TODO: Add back in when we have a way to test this
+        # if include_performance_details:
+        #     assert 'include_performance_details' in task_result and task_result[
+        #         'include_performance_details'] is not None
+        # else:
+        #     assert 'include_performance_details' not in task_result or task_result[
+        #         'include_performance_details'] is None


 # TODO: Skipping this test in staging due to label not updating
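
For orientation, the assertions in test_export_v2 imply that each exported record looks roughly like the sketch below. This shape is inferred only from the keys the test touches; real export records carry many more fields.

# Illustrative record shape inferred from the keys accessed in test_export_v2.
example_record = {
    "errors": [],                       # the test asserts this list is empty
    "projects": {
        "<project_uid>": {              # keyed by project.uid
            "labels": [
                {"id": "<label_uid>"},  # the test checks label.uid is among these ids
            ],
        },
    },
}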

0 commit comments
