Skip to content

Commit a6d8c4c

Browse files
author
Kevin Kim
committed
Allow create_data_row() to take in dictionary
1 parent e10c156 commit a6d8c4c

File tree

1 file changed

+26
-10
lines changed

1 file changed

+26
-10
lines changed

labelbox/schema/dataset.py

Lines changed: 26 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -52,40 +52,56 @@ class Dataset(DbObject, Updateable, Deletable):
5252
iam_integration = Relationship.ToOne("IAMIntegration", False,
5353
"iam_integration", "signer")
5454

55-
def create_data_row(self, items=None, **kwargs) -> "DataRow":
    """ Creates a single DataRow belonging to this dataset.

    >>> dataset.create_data_row(row_data="http://my_site.com/photos/img_01.jpg")

    Args:
        items: Dictionary containing new `DataRow` data. At a minimum,
            must contain `row_data` or `DataRow.row_data`. Keys may be
            `Field` objects or plain strings.
        **kwargs: Key-value arguments containing new `DataRow` data. At a minimum,
            must contain `row_data`.

    Raises:
        InvalidQueryError: If both dictionary and `kwargs` are provided as inputs.
        InvalidQueryError: If `DataRow.row_data` field value is not provided
            in `kwargs`.
        InvalidAttributeError: in case the DB object type does not contain
            any of the field names given in `kwargs`.

    """

    def convert_field_keys(items):
        # Normalize dict keys: `Field` objects are replaced by their string
        # names so the payload matches the kwargs-based code path.
        return {
            key.name if isinstance(key, Field) else key: value
            for key, value in items.items()
        }

    # The two input styles are mutually exclusive; mixing them would make
    # precedence ambiguous, so reject the call outright.
    if items is not None and len(kwargs) > 0:
        raise InvalidQueryError(
            "Argument to create_data_row() must be either a dictionary, or kwargs containing `row_data` at minimum"
        )

    DataRow = Entity.DataRow
    args = convert_field_keys(items) if items is not None else kwargs

    if DataRow.row_data.name not in args:
        raise InvalidQueryError(
            "DataRow.row_data missing when creating DataRow.")

    # If row data is a local file path, upload it to server.
    # Guard with isinstance: `row_data` may be a non-string payload (e.g. a
    # dict of text data), for which os.path.exists would raise TypeError.
    row_data = args[DataRow.row_data.name]
    if isinstance(row_data, str) and os.path.exists(row_data):
        args[DataRow.row_data.name] = self.client.upload_file(row_data)
    args[DataRow.dataset.name] = self

    # Parse metadata fields, if they are provided
    if DataRow.custom_metadata.name in args:
        mdo = self.client.get_data_row_metadata_ontology()
        args[DataRow.custom_metadata.name] = mdo.parse_upsert_metadata(
            args[DataRow.custom_metadata.name])
    return self.client._create(DataRow, args)
89105

90106
def create_data_rows_sync(self, items) -> None:
91107
""" Synchronously bulk upload data rows.

0 commit comments

Comments (0)