
 from pydantic import BaseModel, conlist, constr
 from labelbox.schema.identifiables import DataRowIdentifiers, UniqueIds
+from labelbox.schema.identifiable import UniqueId, GlobalKey

 from labelbox.schema.ontology import SchemaId
-from labelbox.utils import _CamelCaseMixin, format_iso_datetime, format_iso_from_string
+from labelbox.utils import _CamelCaseMixin, camel_case, format_iso_datetime, format_iso_from_string


 class DataRowMetadataKind(Enum):
@@ -57,9 +58,12 @@ class DataRowMetadata(_CamelCaseMixin):


 class DeleteDataRowMetadata(_CamelCaseMixin):
-    data_row_id: str
+    data_row_id: Union[str, UniqueId, GlobalKey]
     fields: List[SchemaId]

+    class Config:
+        arbitrary_types_allowed = True
+

 class DataRowMetadataBatchResponse(_CamelCaseMixin):
     global_key: Optional[str]
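
A minimal sketch, not part of the diff, of the three identifier forms the widened `data_row_id` field accepts (the ids below are fabricated cuid-style placeholders, and the import paths are assumptions); the plain-string form is kept only for backwards compatibility and is normalized to `UniqueId` inside `bulk_delete` further down:

```python
from labelbox.schema.data_row_metadata import DeleteDataRowMetadata  # assumed module path
from labelbox.schema.identifiable import GlobalKey, UniqueId

# All ids below are made-up placeholders.
schema_id = "cko8s9r5v0001h2dk9elqdidh"

DeleteDataRowMetadata(data_row_id=UniqueId("cko8sbczn0002h2dkdaxb5kal"), fields=[schema_id])
DeleteDataRowMetadata(data_row_id=GlobalKey("my-global-key"), fields=[schema_id])
DeleteDataRowMetadata(data_row_id="cko8sbczn0002h2dkdaxb5kal", fields=[schema_id])  # legacy str, treated as a UniqueId
```
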
@@ -86,9 +90,28 @@ class _UpsertBatchDataRowMetadata(_CamelCaseMixin):


 class _DeleteBatchDataRowMetadata(_CamelCaseMixin):
-    data_row_id: str
+    data_row_identifier: Union[UniqueId, GlobalKey]
     schema_ids: List[SchemaId]

+    class Config:
+        arbitrary_types_allowed = True
+        alias_generator = camel_case
+
+    def dict(self, *args, **kwargs):
+        res = super().dict(*args, **kwargs)
+        if 'data_row_identifier' in res.keys():
+            key = 'data_row_identifier'
+            id_type_key = 'id_type'
+        else:
+            key = 'dataRowIdentifier'
+            id_type_key = 'idType'
+        data_row_identifier = res.pop(key)
+        res[key] = {
+            "id": data_row_identifier.key,
+            id_type_key: data_row_identifier.id_type
+        }
+        return res
+

 _BatchInputs = Union[List[_UpsertBatchDataRowMetadata],
                      List[_DeleteBatchDataRowMetadata]]
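
Roughly how the overridden `dict()` reshapes the batch payload, sketched under the assumption that `by_alias=True` is used (as in `_validate_delete` below) and that `id_type` carries the SDK's `IdType` value for the identifier:

```python
# Sketch only; the ids are fabricated placeholders.
batch = _DeleteBatchDataRowMetadata(
    data_row_identifier=UniqueId("cko8sbczn0002h2dkdaxb5kal"),
    schema_ids=["cko8s9r5v0001h2dk9elqdidh"],
)
payload = batch.dict(by_alias=True)
# payload is expected to look like:
# {
#     "dataRowIdentifier": {
#         "id": "cko8sbczn0002h2dkdaxb5kal",
#         "idType": <IdType value for a data row id>,
#     },
#     "schemaIds": ["cko8s9r5v0001h2dk9elqdidh"],
# }
```
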
@@ -556,7 +579,17 @@ def bulk_delete(
         """ Delete metadata from a data row by specifying the fields you want to remove

         >>> delete = DeleteDataRowMetadata(
-        >>>    data_row_id="datarow-id",
+        >>>    data_row_id=UniqueId("datarow-id"),
+        >>>    fields=[
+        >>>        "schema-id-1",
+        >>>        "schema-id-2"
+        >>>        ...
+        >>>    ]
+        >>> )
+        >>> mdo.bulk_delete([delete])
+
+        >>> delete = DeleteDataRowMetadata(
+        >>>    data_row_id=GlobalKey("global-key"),
         >>>    fields=[
         >>>        "schema-id-1",
         >>>        "schema-id-2"
@@ -565,8 +598,22 @@ def bulk_delete(
         >>> )
         >>> mdo.bulk_delete([delete])

+        >>> delete = DeleteDataRowMetadata(
+        >>>    data_row_id="data-row-id",
+        >>>    fields=[
+        >>>        "schema-id-1",
+        >>>        "schema-id-2"
+        >>>        ...
+        >>>    ]
+        >>> )
+        >>> mdo.bulk_delete([delete])
+
+
         Args:
             deletes: Data row and schema ids to delete
+                For the data row identifier, UniqueId, str, and GlobalKey are supported.
+                A str is assumed to be a UniqueId.
+                Do not pass a mix of data row ids and global keys in the same list.

         Returns:
             list of unsuccessful deletions.
@@ -575,13 +622,34 @@ def bulk_delete(
         """

         if not len(deletes):
-            raise ValueError("Empty list passed")
+            raise ValueError("The 'deletes' list cannot be empty.")
+
+        passed_strings = False
+        for i, delete in enumerate(deletes):
+            if isinstance(delete.data_row_id, str):
+                passed_strings = True
+                deletes[i] = DeleteDataRowMetadata(data_row_id=UniqueId(
+                    delete.data_row_id),
+                                                   fields=delete.fields)
+            elif isinstance(delete.data_row_id, UniqueId):
+                continue
+            elif isinstance(delete.data_row_id, GlobalKey):
+                continue
+            else:
+                raise ValueError(
+                    f"Invalid data row identifier type '{type(delete.data_row_id)}' for '{delete.data_row_id}'"
+                )
+
+        if passed_strings:
+            warnings.warn(
+                "Using string for data row id will be deprecated. Please use "
+                "UniqueId instead.")

         def _batch_delete(
             deletes: List[_DeleteBatchDataRowMetadata]
         ) -> List[DataRowMetadataBatchResponse]:
-            query = """mutation DeleteDataRowMetadataBetaPyApi($deletes: [DataRowCustomMetadataBatchDeleteInput!]!) {
-                deleteDataRowCustomMetadata(data: $deletes) {
+            query = """mutation DeleteDataRowMetadataBetaPyApi($deletes: [DataRowIdentifierCustomMetadataBatchDeleteInput!]) {
+                deleteDataRowCustomMetadata(dataRowIdentifiers: $deletes) {
                     dataRowId
                     error
                     fields {
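
For orientation, a hedged sketch of the `$deletes` variable the reworked mutation receives, assuming the batches are the `dict(by_alias=True)` output shown earlier and that the SDK's `IdType` enum serializes GlobalKey identifiers to "GKEY" (an assumption, not something this diff states):

```python
# Illustrative only; every value here is made up.
variables = {
    "deletes": [
        {
            "dataRowIdentifier": {"id": "my-global-key", "idType": "GKEY"},  # "GKEY" assumed
            "schemaIds": ["cko8s9r5v0001h2dk9elqdidh"],
        },
    ]
}
```
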
@@ -810,7 +878,7 @@ def _validate_delete(self, delete: DeleteDataRowMetadata):
             deletes.add(schema.uid)

         return _DeleteBatchDataRowMetadata(
-            data_row_id=delete.data_row_id,
+            data_row_identifier=delete.data_row_id,
             schema_ids=list(delete.fields)).dict(by_alias=True)

     def _validate_custom_schema_by_name(self,