@@ -260,10 +260,10 @@ def create_data_rows_sync(self, items) -> None:
             url_param: descriptor_url
         })
 
-    def create_data_rows(
-            self,
-            items,
-            file_upload_thread_count=FILE_UPLOAD_THREAD_COUNT) -> "Task":
+    def create_data_rows(self,
+                         items,
+                         file_upload_thread_count=FILE_UPLOAD_THREAD_COUNT
+                        ) -> "DataUpsertTask":
         """ Asynchronously bulk upload data rows
 
         Use this instead of `Dataset.create_data_rows_sync` uploads for batches that contain more than 1000 data rows.
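A minimal caller-side sketch of the asynchronous path, assuming an existing `dataset` handle; the row payloads, global keys, and the `errors` check are illustrative assumptions, not part of this diff:

# Hypothetical usage; payloads and keys are placeholders.
task = dataset.create_data_rows([
    {"row_data": "https://example.com/images/img_01.jpg", "global_key": "img_01"},
    {"row_data": "https://example.com/images/img_02.jpg", "global_key": "img_02"},
])
task.wait_till_done()   # block until the backend finishes the bulk upload
if task.errors:         # inspect per-row failures, if any
    print(task.errors)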
@@ -576,10 +576,10 @@ def _export(
         is_streamable = res["isStreamable"]
         return Task.get_task(self.client, task_id), is_streamable
 
-    def upsert_data_rows(
-            self,
-            items,
-            file_upload_thread_count=FILE_UPLOAD_THREAD_COUNT) -> "Task":
+    def upsert_data_rows(self,
+                         items,
+                         file_upload_thread_count=FILE_UPLOAD_THREAD_COUNT
+                        ) -> "DataUpsertTask":
         """
         Upserts data rows in this dataset. When "key" is provided, and it references an existing data row,
         an update will be performed. When "key" is not provided a new data row will be created.
@@ -610,18 +610,14 @@ def upsert_data_rows(
         >>> ])
         >>> task.wait_till_done()
         """
-        if len(items) > MAX_DATAROW_PER_API_OPERATION:
-            raise MalformedQueryException(
-                f"Cannot upsert more than {MAX_DATAROW_PER_API_OPERATION} DataRows per function call."
-            )
-
         specs = DataRowUpsertItem.build(self.uid, items)
         return self._exec_upsert_data_rows(specs, file_upload_thread_count)
 
     def _exec_upsert_data_rows(
-            self,
-            specs: List[DataRowItemBase],
-            file_upload_thread_count: int = FILE_UPLOAD_THREAD_COUNT) -> "Task":
+        self,
+        specs: List[DataRowItemBase],
+        file_upload_thread_count: int = FILE_UPLOAD_THREAD_COUNT
+    ) -> "DataUpsertTask":
 
         manifest = DataRowUploader.upload_in_chunks(
             client=self.client,
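A short upsert sketch following the "key" semantics described in the docstring, assuming an existing `dataset` handle; `existing_key` stands in for whatever key object the SDK expects, and the row payloads are placeholders:

# Hypothetical upsert call; only the "key"-present vs. "key"-absent distinction
# comes from the docstring, everything else is a placeholder.
task = dataset.upsert_data_rows([
    # no "key": a new data row is created
    {"row_data": "https://example.com/images/img_03.jpg"},
    # "key" references an existing data row: that row is updated
    {"key": existing_key, "row_data": "https://example.com/images/img_01_v2.jpg"},
])
task.wait_till_done()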