Skip to content

Commit 6ed4c46

Browse files
committed
Add transformers for JSON object processing
1 parent a37696d commit 6ed4c46

File tree

8 files changed

+124
-244
lines changed

8 files changed

+124
-244
lines changed

.gitignore

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -99,3 +99,4 @@ dask-worker-space
 99  99  singlestoredb/mysql/tests/databases.json
100 100
101 101  trees
    102  +CODIFI_**

AC_PLAN.md

Lines changed: 0 additions & 35 deletions
This file was deleted.

AC_PROMPT.md

Lines changed: 0 additions & 55 deletions
This file was deleted.

AC_SPEC.md

Lines changed: 0 additions & 20 deletions
This file was deleted.

singlestoredb/functions/ext/arrow.py

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -254,7 +254,7 @@ def load_arrow(
254 254
255 255
256 256  def dump(
257     -    returns: List[Tuple[int, Optional[Transformer]]],
    257 +    returns: List[Tuple[str, int, Optional[Transformer]]],
258 258      row_ids: List[int],
259 259      rows: List[List[Any]],
260 260  ) -> bytes:
@@ -263,7 +263,7 @@ def dump(
263 263
264 264      Parameters
265 265      ----------
266     -    returns : List[Tuple[int, Optional[Transformer]]]
    266 +    returns : List[Tuple[str, int, Optional[Transformer]]]
267 267          The returned data type
268 268      row_ids : List[int]
269 269          The row IDs
@@ -290,7 +290,7 @@ def dump(
290 290          for i, value in enumerate(row):
291 291              processed_row.append(
292 292                  apply_transformer(
293     -                    returns[i][1], value,
    293 +                    returns[i][2], value,
294 294                  ),
295 295              )
296 296          processed_rows.append(processed_row)
@@ -307,7 +307,7 @@ def dump(
307 307
308 308
309 309  def _dump_vectors(
310     -    returns: List[Tuple[int, Optional[Transformer]]],
    310 +    returns: List[Tuple[str, int, Optional[Transformer]]],
311 311      row_ids: 'pa.Array[pa.int64]',
312 312      cols: List[Tuple['pa.Array[Any]', Optional['pa.Array[pa.bool_]']]],
313 313  ) -> bytes:
@@ -316,7 +316,7 @@ def _dump_vectors(
316 316
317 317      Parameters
318 318      ----------
319     -    returns : List[Tuple[int, Optional[Transformer]]]
    319 +    returns : List[Tuple[str, int, Optional[Transformer]]]
320 320          The returned data type
321 321      row_ids : List[int]
322 322          The row IDs
@@ -339,7 +339,7 @@ def _dump_vectors(
339 339      for i, (data, mask) in enumerate(cols):
340 340          processed_cols.append((
341 341              apply_transformer(
342     -                returns[i][1], data,
    342 +                returns[i][2], data,
343 343              ), mask,
344 344          ))
345 345
@@ -358,7 +358,7 @@ def _dump_vectors(
358 358
359 359
360 360  def dump_arrow(
361     -    returns: List[Tuple[int, Optional[Transformer]]],
    361 +    returns: List[Tuple[str, int, Optional[Transformer]]],
362 362      row_ids: 'pa.Array[int]',
363 363      cols: List[Tuple['pa.Array[Any]', 'pa.Array[bool]']],
364 364  ) -> bytes:
@@ -369,7 +369,7 @@ def dump_arrow(
369 369
370 370
371 371  def dump_numpy(
372     -    returns: List[Tuple[int, Optional[Transformer]]],
    372 +    returns: List[Tuple[str, int, Optional[Transformer]]],
373 373      row_ids: 'np.typing.NDArray[np.int64]',
374 374      cols: List[Tuple['np.typing.NDArray[Any]', 'np.typing.NDArray[np.bool_]']],
375 375  ) -> bytes:
@@ -384,7 +384,7 @@ def dump_numpy(
384 384
385 385
386 386  def dump_pandas(
387     -    returns: List[Tuple[int, Optional[Transformer]]],
    387 +    returns: List[Tuple[str, int, Optional[Transformer]]],
388 388      row_ids: 'pd.Series[np.int64]',
389 389      cols: List[Tuple['pd.Series[Any]', 'pd.Series[np.bool_]']],
390 390  ) -> bytes:
@@ -399,7 +399,7 @@ def dump_pandas(
399 399
400 400
401 401  def dump_polars(
402     -    returns: List[Tuple[int, Optional[Transformer]]],
    402 +    returns: List[Tuple[str, int, Optional[Transformer]]],
403 403      row_ids: 'pl.Series[pl.Int64]',
404 404      cols: List[Tuple['pl.Series[Any]', 'pl.Series[pl.Boolean]']],
405 405  ) -> bytes:

0 commit comments

Comments
 (0)