@@ -50,7 +50,6 @@ def id(self):
 
 
 class _CamelCaseMixin(BaseModel):
-
     class Config:
         allow_population_by_field_name = True
         alias_generator = camel_case
@@ -158,7 +157,7 @@ def _make_name_index(fields: List[DataRowMetadataSchema]):
 
     @staticmethod
     def _make_id_index(
-            fields: List[DataRowMetadataSchema]
+        fields: List[DataRowMetadataSchema]
     ) -> Dict[SchemaId, DataRowMetadataSchema]:
         index = {}
         for f in fields:
@@ -198,7 +197,7 @@ def _parse_ontology(self) -> List[DataRowMetadataSchema]:
                     DataRowMetadataSchema(**{
                         **option,
                         **{
-                            "parent": schema["id"]
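+                            # schema dicts expose the identifier as "uid"; "id" read the wrong key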
+                            "parent": schema["uid"]
                         }
                     }))
             schema["options"] = options
@@ -207,9 +206,9 @@ def _parse_ontology(self) -> List[DataRowMetadataSchema]:
         return fields
 
     def parse_metadata(
-            self, unparsed: List[Dict[str,
-                                      List[Union[str,
-                                                 Dict]]]]) -> List[DataRowMetadata]:
+        self, unparsed: List[Dict[str,
+                                  List[Union[str,
+                                             Dict]]]]) -> List[DataRowMetadata]:
         """ Parse metadata responses
 
         >>> mdo.parse_metadata([metadata])
@@ -270,7 +269,7 @@ def bulk_upsert(
             raise ValueError("Empty list passed")
 
         def _batch_upsert(
-                upserts: List[_UpsertBatchDataRowMetadata]
+            upserts: List[_UpsertBatchDataRowMetadata]
         ) -> List[DataRowMetadataBatchResponse]:
             query = """mutation UpsertDataRowMetadataBetaPyApi($metadata: [DataRowCustomMetadataBatchUpsertInput!]!) {
                 upsertDataRowCustomMetadata(data: $metadata){
@@ -303,13 +302,13 @@ def _batch_upsert(
                     fields=list(
                         chain.from_iterable(
                             self._parse_upsert(m) for m in m.fields))).dict(
-                                    by_alias=True))
+                                by_alias=True))
 
         res = _batch_operations(_batch_upsert, items, self._batch_size)
         return res
 
     def bulk_delete(
-            self, deletes: List[DeleteDataRowMetadata]
+        self, deletes: List[DeleteDataRowMetadata]
     ) -> List[DataRowMetadataBatchResponse]:
         """ Delete metadata from a datarow by specifying the fields you want to remove
 
@@ -336,7 +335,7 @@ def bulk_delete(
             raise ValueError("Empty list passed")
 
         def _batch_delete(
-                deletes: List[_DeleteBatchDataRowMetadata]
+            deletes: List[_DeleteBatchDataRowMetadata]
         ) -> List[DataRowMetadataBatchResponse]:
             query = """mutation DeleteDataRowMetadataBetaPyApi($deletes: [DataRowCustomMetadataBatchDeleteInput!]!) {
                 deleteDataRowCustomMetadata(data: $deletes) {
@@ -415,6 +414,8 @@ def _parse_upsert(
             parsed = _validate_parse_datetime(metadatum)
         elif schema.kind == DataRowMetadataKind.string:
             parsed = _validate_parse_text(metadatum)
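+        # numeric metadata routes through its own validator (added below)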
+        elif schema.kind == DataRowMetadataKind.number:
+            parsed = _validate_parse_number(metadatum)
         elif schema.kind == DataRowMetadataKind.embedding:
             parsed = _validate_parse_embedding(metadatum)
         elif schema.kind == DataRowMetadataKind.enum:
@@ -455,9 +456,9 @@ def _batch_items(iterable: List[Any], size: int) -> Generator[Any, None, None]:
 
 
 def _batch_operations(
-        batch_function: _BatchFunction,
-        items: List,
-        batch_size: int = 100,
+    batch_function: _BatchFunction,
+    items: List,
+    batch_size: int = 100,
 ):
     response = []
 
@@ -472,6 +473,12 @@ def _validate_parse_embedding(
     return [field.dict(by_alias=True)]
 
 
+def _validate_parse_number(
+    field: DataRowMetadataField
+) -> List[Dict[str, Union[SchemaId, Number]]]:
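+    # mirrors _validate_parse_embedding: no client-side checks, just a camelCase-aliased dict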
+    return [field.dict(by_alias=True)]
+
+
 def _validate_parse_datetime(
         field: DataRowMetadataField) -> List[Dict[str, Union[SchemaId, str]]]:
     # TODO: better validate tzinfo