Skip to content

Commit ce83756

Browse files
author
gdj0nes
committed
CHG: property for batch size
1 parent 82468f1 commit ce83756

File tree

1 file changed

+15
-13
lines changed

1 file changed

+15
-13
lines changed

labelbox/schema/data_row_metadata.py

Lines changed: 15 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -106,6 +106,8 @@ class DataRowMetadataOntology:
106106
def __init__(self, client):
107107
self.client = client
108108

109+
self._batch_size = 50
110+
109111
# TODO: consider making these properties to stay in sync with server
110112
self._raw_ontology: Dict = self._get_ontology()
111113
# all fields
@@ -129,14 +131,14 @@ def __init__(self, client):
129131
]
130132
self.custom_id_index: Dict[SchemaId,
131133
DataRowMetadataSchema] = self._make_id_index(
132-
self.custom_fields)
134+
self.custom_fields)
133135
self.custom_name_index: Dict[str, DataRowMetadataSchema] = {
134136
f.name: f for f in self.custom_fields
135137
}
136138

137139
@staticmethod
138140
def _make_id_index(
139-
fields: List[DataRowMetadataSchema]
141+
fields: List[DataRowMetadataSchema]
140142
) -> Dict[SchemaId, DataRowMetadataSchema]:
141143
index = {}
142144
for f in fields:
@@ -185,9 +187,9 @@ def _parse_ontology(self):
185187
return fields
186188

187189
def parse_metadata(
188-
self, unparsed: List[Dict[str,
189-
List[Union[str,
190-
Dict]]]]) -> List[DataRowMetadata]:
190+
self, unparsed: List[Dict[str,
191+
List[Union[str,
192+
Dict]]]]) -> List[DataRowMetadata]:
191193
""" Parse metadata responses
192194
193195
>>> mdo.parse_metadata([datarow.metadata])
@@ -245,7 +247,7 @@ def bulk_upsert(
245247
raise ValueError("Empty list passed")
246248

247249
def _batch_upsert(
248-
upserts: List[_UpsertBatchDataRowMetadata]
250+
upserts: List[_UpsertBatchDataRowMetadata]
249251
) -> List[DataRowMetadataBatchResponse]:
250252

251253
query = """mutation UpsertDataRowMetadataBetaPyApi($metadata: [DataRowCustomMetadataBatchUpsertInput!]!) {
@@ -270,10 +272,10 @@ def _batch_upsert(
270272
chain.from_iterable(
271273
self._parse_upsert(m) for m in m.fields))).dict())
272274

273-
return _batch_operations(_batch_upsert, items)
275+
return _batch_operations(_batch_upsert, items, self._batch_size)
274276

275277
def bulk_delete(
276-
self, deletes: List[DeleteDataRowMetadata]
278+
self, deletes: List[DeleteDataRowMetadata]
277279
) -> List[DataRowMetadataBatchResponse]:
278280
""" Delete metadata from a datarow by specifying the fields you want to remove
279281
@@ -299,7 +301,7 @@ def bulk_delete(
299301
raise ValueError("Empty list passed")
300302

301303
def _batch_delete(
302-
deletes: List[_DeleteBatchDataRowMetadata]
304+
deletes: List[_DeleteBatchDataRowMetadata]
303305
) -> List[DataRowMetadataBatchResponse]:
304306
query = """mutation DeleteDataRowMetadataBetaPyApi($deletes: [DataRowCustomMetadataBatchDeleteInput!]!) {
305307
deleteDataRowCustomMetadata(data: $deletes) {
@@ -318,7 +320,7 @@ def _batch_delete(
318320
for m in deletes:
319321
items.append(self._validate_delete(m))
320322

321-
return _batch_operations(_batch_delete, items)
323+
return _batch_operations(_batch_delete, items, batch_size=self._batch_size if self._batch_size < 25 else 25)
322324

323325
def _parse_upsert(
324326
self, metadatum: DataRowMetadataField
@@ -376,9 +378,9 @@ def _batch_items(iterable, size):
376378

377379

378380
def _batch_operations(
379-
batch_function: _BatchFunction,
380-
items: List,
381-
batch_size: int = 100,
381+
batch_function: _BatchFunction,
382+
items: List,
383+
batch_size: int = 100,
382384
):
383385
response = []
384386
for batch in _batch_items(items, batch_size):

0 commit comments

Comments (0)