1515from labelbox .exceptions import InvalidQueryError , LabelboxError , ResourceNotFoundError , InvalidAttributeError
1616from labelbox .orm .db_object import DbObject , Updateable , Deletable
1717from labelbox .orm .model import Entity , Field , Relationship
18+ from labelbox .orm import query
1819from labelbox .exceptions import MalformedQueryException
1920
2021if TYPE_CHECKING :
@@ -95,18 +96,46 @@ def convert_field_keys(items):
9596 raise InvalidQueryError (
9697 "DataRow.row_data missing when creating DataRow." )
9798
98- # If row data is a local file path, upload it to server.
9999 row_data = args [DataRow .row_data .name ]
100- if os .path .exists (row_data ):
100+ if not isinstance (row_data , str ):
101+ # If the row data is an object, upload as a string
102+ args [DataRow .row_data .name ] = json .dumps (row_data )
103+ elif os .path .exists (row_data ):
104+ # If row data is a local file path, upload it to server.
101105 args [DataRow .row_data .name ] = self .client .upload_file (row_data )
102- args [DataRow .dataset .name ] = self
103106
104107 # Parse metadata fields, if they are provided
105108 if DataRow .metadata_fields .name in args :
106109 mdo = self .client .get_data_row_metadata_ontology ()
107110 args [DataRow .metadata_fields .name ] = mdo .parse_upsert_metadata (
108111 args [DataRow .metadata_fields .name ])
109- return self .client ._create (DataRow , args )
112+
113+ query_str = """mutation CreateDataRowPyApi(
114+ $row_data: String!,
115+ $metadata_fields: [DataRowCustomMetadataUpsertInput!],
116+ $attachments: [DataRowAttachmentInput!],
117+ $media_type : MediaType,
118+ $external_id : String,
119+ $global_key : String,
120+ $dataset: ID!
121+ ){
122+ createDataRow(
123+ data:
124+ {
125+ rowData: $row_data
126+ mediaType: $media_type
127+ metadataFields: $metadata_fields
128+ externalId: $external_id
129+ globalKey: $global_key
130+ attachments: $attachments
131+ dataset: {connect: {id: $dataset}}
132+ }
133+ )
134+ {%s}
135+ }
136+ """ % query .results_query_part (Entity .DataRow )
137+ res = self .client .execute (query_str , {** args , 'dataset' : self .uid })
138+ return DataRow (self .client , res ['createDataRow' ])
110139
111140 def create_data_rows_sync (self , items ) -> None :
112141 """ Synchronously bulk upload data rows.
@@ -229,8 +258,8 @@ def _create_descriptor_file(self, items, max_attachments_per_data_row=None):
229258 >>> {DataRow.row_data:"http://my_site.com/photos/img_01.jpg"},
230259 >>> {DataRow.row_data:"/path/to/file1.jpg"},
231260 >>> "path/to/file2.jpg",
232- >>> {"tileLayerUrl" : "http://", ...}
233- >>> {"conversationalData " : [ ...], ...}
261+ >>> {DataRow.row_data: { "tileLayerUrl" : "http://", ...} }
262+ >>> {DataRow.row_data: {"type": ..., 'version': ..., 'messages': [...]} }
234263 >>> ])
235264
236265 For an example showing how to upload tiled data_rows see the following notebook:
@@ -258,7 +287,7 @@ def _create_descriptor_file(self, items, max_attachments_per_data_row=None):
258287
259288 def upload_if_necessary (item ):
260289 row_data = item ['row_data' ]
261- if os .path .exists (row_data ):
290+ if isinstance ( row_data , str ) and os .path .exists (row_data ):
262291 item_url = self .client .upload_file (row_data )
263292 item ['row_data' ] = item_url
264293 if 'external_id' not in item :
@@ -341,40 +370,39 @@ def validate_keys(item):
341370 "`row_data` missing when creating DataRow." )
342371
343372 invalid_keys = set (item ) - {
344- * {f .name for f in DataRow .fields ()}, 'attachments'
373+ * {f .name for f in DataRow .fields ()}, 'attachments' , 'media_type'
345374 }
346375 if invalid_keys :
347376 raise InvalidAttributeError (DataRow , invalid_keys )
348377 return item
349378
def formatLegacyConversationalData(item):
    """Convert a legacy ``conversationalData`` payload into the standard
    ``row_data`` dict format used by the data-row create pipeline.

    Pops the legacy keys off ``item``, validates the message list, and
    stores the assembled conversation object under ``item["row_data"]``.
    Mutates ``item`` in place and also returns it.

    Raises whatever ``validate_conversational_data`` raises for a
    malformed ``messages`` list.
    """
    messages = item.pop("conversationalData")
    # Legacy payloads may omit these; fall back to the defaults.
    version = item.pop("version", 1)
    # Renamed from `type` to avoid shadowing the builtin.
    msg_type = item.pop("type", "application/vnd.labelbox.conversational")
    if "externalId" in item:
        # Normalize the legacy camelCase key to the SDK's snake_case field.
        item["external_id"] = item.pop("externalId")
    if "globalKey" in item:
        # NOTE(review): unlike externalId above, this writes the camelCase
        # key straight back (a no-op rename). Confirm whether "global_key"
        # was intended — validate_keys appears to accept only snake_case.
        item["globalKey"] = item.pop("globalKey")
    validate_conversational_data(messages)
    item["row_data"] = {
        "type": msg_type,
        "version": version,
        "messages": messages
    }
    return item
398+
350399 def convert_item (item ):
351- # Don't make any changes to tms data
352400 if "tileLayerUrl" in item :
353401 validate_attachments (item )
354402 return item
355403
356404 if "conversationalData" in item :
357- messages = item .pop ("conversationalData" )
358- version = item .pop ("version" )
359- type = item .pop ("type" )
360- if "externalId" in item :
361- external_id = item .pop ("externalId" )
362- item ["external_id" ] = external_id
363- if "globalKey" in item :
364- global_key = item .pop ("globalKey" )
365- item ["globalKey" ] = global_key
366- validate_conversational_data (messages )
367- one_conversation = \
368- {
369- "type" : type ,
370- "version" : version ,
371- "messages" : messages
372- }
373- conversationUrl = self .client .upload_data (
374- json .dumps (one_conversation ),
375- content_type = "application/json" ,
376- filename = "conversational_data.json" )
377- item ["row_data" ] = conversationUrl
405+ formatLegacyConversationalData (item )
378406
379407 # Convert all payload variations into the same dict format
380408 item = format_row (item )
@@ -386,11 +414,7 @@ def convert_item(item):
386414 parse_metadata_fields (item )
387415 # Upload any local file paths
388416 item = upload_if_necessary (item )
389-
390- return {
391- "data" if key == "row_data" else utils .camel_case (key ): value
392- for key , value in item .items ()
393- }
417+ return item
394418
395419 if not isinstance (items , Iterable ):
396420 raise ValueError (
0 commit comments