From 0d66f7e774612ddfc839ce094b1298031ad9c8ff Mon Sep 17 00:00:00 2001
From: "mintlify[bot]" <109931778+mintlify[bot]@users.noreply.github.com>
Date: Mon, 27 Oct 2025 02:46:15 +0000
Subject: [PATCH 1/4] Update apps/batching.mdx

---
 apps/batching.mdx | 248 ++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 248 insertions(+)
 create mode 100644 apps/batching.mdx

diff --git a/apps/batching.mdx b/apps/batching.mdx
new file mode 100644
index 0000000..7b777c0
--- /dev/null
+++ b/apps/batching.mdx
@@ -0,0 +1,248 @@
+---
+title: "Batching"
+description: "Execute multiple invocations in parallel with concurrency control"
+---
+
+Batch invocations allow you to execute the same action multiple times with different payloads in parallel. This is useful for processing large datasets, running bulk operations, or executing multiple tasks concurrently.
+
+## Creating a batch job
+
+Create a batch job by providing an array of payloads to execute:
+
+
+```typescript Typescript/Javascript
+import Kernel from '@onkernel/sdk';
+
+const kernel = new Kernel();
+
+const batchJob = await kernel.invocations.batch.create({
+  app_name: 'my-app',
+  action_name: 'analyze',
+  version: '1.0.0',
+  payloads: [
+    '{"url": "https://example.com/page1"}',
+    '{"url": "https://example.com/page2"}',
+    '{"url": "https://example.com/page3"}',
+  ],
+  max_concurrency: 5, // Optional: limit concurrent executions
+});
+
+console.log(batchJob.batch_job_id);
+console.log(batchJob.total_count);
+```
+
+```python Python
+from kernel import Kernel
+
+kernel = Kernel()
+
+batch_job = kernel.invocations.batch.create(
+    app_name="my-app",
+    action_name="analyze",
+    version="1.0.0",
+    payloads=[
+        '{"url": "https://example.com/page1"}',
+        '{"url": "https://example.com/page2"}',
+        '{"url": "https://example.com/page3"}',
+    ],
+    max_concurrency=5, # Optional: limit concurrent executions
+)
+
+print(batch_job.batch_job_id)
+print(batch_job.total_count)
```
+
+
+### Parameters
+
+- `app_name` (required): The name of your deployed app.
+- `action_name` (required): The action to invoke.
+- `version` (optional): The app version to use. Defaults to `"latest"`.
+- `payloads` (required): Array of JSON strings, each representing a payload for one invocation.
+- `max_concurrency` (optional): Maximum number of concurrent invocations. If not specified, all invocations run in parallel.
+
+
+Each payload must be a valid JSON string and is subject to the same 64 KB size limit as regular invocations.
+
+
+## Listing batch jobs
+
+Retrieve all batch jobs for your organization with optional filtering:
+
+
+```typescript Typescript/Javascript
+const batchJobs = await kernel.batchJobs.list({
+  app_name: 'my-app',
+  status: 'running',
+  limit: 50,
+  offset: 0,
+});
+
+for (const job of batchJobs) {
+  console.log(`${job.id}: ${job.succeeded_count}/${job.total_count} succeeded`);
+}
+```
+
+```python Python
+batch_jobs = kernel.batch_jobs.list(
+    app_name="my-app",
+    status="running",
+    limit=50,
+    offset=0,
+)
+
+for job in batch_jobs:
+    print(f"{job.id}: {job.succeeded_count}/{job.total_count} succeeded")
+```
+
+
+### Filter parameters
+
+- `app_name` (optional): Filter by app name.
+- `action_name` (optional): Filter by action name.
+- `status` (optional): Filter by status (`queued`, `running`, `succeeded`, `failed`, `partially_failed`).
+- `limit` (optional): Number of results to return (default: 50).
+- `offset` (optional): Number of results to skip for pagination.
+
+## Getting batch job details
+
+Retrieve detailed information about a specific batch job, including all individual invocations:
+
+
+```typescript Typescript/Javascript
+const batchJob = await kernel.batchJobs.retrieve('batch_job_id');
+
+console.log(`Status: ${batchJob.status}`);
+console.log(`Progress: ${batchJob.succeeded_count}/${batchJob.total_count}`);
+
+// Access individual invocations
+for (const invocation of batchJob.invocations) {
+  console.log(`${invocation.id}: ${invocation.status}`);
+}
+```
+
+```python Python
+batch_job = kernel.batch_jobs.retrieve("batch_job_id")
+
+print(f"Status: {batch_job.status}")
+print(f"Progress: {batch_job.succeeded_count}/{batch_job.total_count}")
+
+# Access individual invocations
+for invocation in batch_job.invocations:
+    print(f"{invocation.id}: {invocation.status}")
+```
+
+
+## Streaming batch job progress
+
+Monitor batch job progress in real time using Server-Sent Events (SSE):
+
+
+```typescript Typescript/Javascript
+const stream = await kernel.batchJobs.events.retrieve('batch_job_id');
+
+for await (const event of stream) {
+  if (event.event === 'batch_progress') {
+    console.log(`Progress: ${event.succeeded_count}/${event.total_count}`);
+  } else if (event.event === 'batch_state') {
+    console.log(`Status changed to: ${event.batch_job.status}`);
+  }
+}
+```
+
+```python Python
+stream = kernel.batch_jobs.events.retrieve("batch_job_id")
+
+for event in stream:
+    if event.event == "batch_progress":
+        print(f"Progress: {event.succeeded_count}/{event.total_count}")
+    elif event.event == "batch_state":
+        print(f"Status changed to: {event.batch_job.status}")
+```
+
+
+### Event types
+
+- `batch_state`: Sent when the batch job status changes (initial state and terminal states).
+- `batch_progress`: Sent when invocation counts change (succeeded or failed count updates).
+
+## Cancelling a batch job
+
+Cancel a running batch job to stop all pending invocations:
+
+
+```typescript Typescript/Javascript
+const batchJob = await kernel.batchJobs.update('batch_job_id', {
+  status: 'failed',
+  output: 'Cancelled by user',
+});
+
+console.log(`Cancelled: ${batchJob.status}`);
+```
+
+```python Python
+batch_job = kernel.batch_jobs.update(
+    "batch_job_id",
+    status="failed",
+    output="Cancelled by user",
+)
+
+print(f"Cancelled: {batch_job.status}")
+```
+
+
+
+Cancelling a batch job only prevents new invocations from starting. Already running invocations will continue to completion.
+
+
+## Batch job statuses
+
+Batch jobs progress through the following statuses:
+
+- `queued`: Batch job created, waiting to start.
+- `running`: Invocations are being executed.
+- `succeeded`: All invocations completed successfully.
+- `failed`: Batch job was cancelled or encountered a critical error.
+- `partially_failed`: Some invocations succeeded, others failed.
+
+## Best practices
+
+### Concurrency control
+
+Use `max_concurrency` to control resource usage and avoid overwhelming downstream services:
+
+```typescript
+const batchJob = await kernel.invocations.batch.create({
+  app_name: 'my-app',
+  action_name: 'scrape',
+  payloads: urls.map(url => JSON.stringify({ url })),
+  max_concurrency: 10, // Process 10 URLs at a time
+});
+```
+
+### Error handling
+
+Individual invocation failures don't stop the batch job. Check the final status and review failed invocations:
+
+```typescript
+const batchJob = await kernel.batchJobs.retrieve('batch_job_id');
+
+if (batchJob.status === 'partially_failed' || batchJob.status === 'failed') {
+  const failedInvocations = batchJob.invocations.filter(
+    inv => inv.status === 'failed'
+  );
+  console.log(`${failedInvocations.length} invocations failed`);
+}
+```
+
+### Payload size limits
+
+Each payload is limited to 64 KB. For larger inputs, store data externally and pass references:
+
+```typescript
+// Instead of large payloads
+const payloads = largeDatasets.map(data => JSON.stringify(data)); // ❌ May exceed 64 KB
+
+// Use references
+const payloads = dataUrls.map(url => JSON.stringify({ data_url: url })); // ✅ Small payload
+```

From 90ff91b21fe165f33d20ff6077523f646169bf3a Mon Sep 17 00:00:00 2001
From: "mintlify[bot]" <109931778+mintlify[bot]@users.noreply.github.com>
Date: Mon, 27 Oct 2025 02:46:31 +0000
Subject: [PATCH 2/4] Update docs.json

---
 docs.json | 1 +
 1 file changed, 1 insertion(+)

diff --git a/docs.json b/docs.json
index 75db29a..651d42d 100644
--- a/docs.json
+++ b/docs.json
@@ -91,6 +91,7 @@
         "apps/develop",
         "apps/deploy",
         "apps/invoke",
+        "apps/batching",
         "apps/stop",
         "apps/secrets",
         "apps/status",

From 933c28968f917290d4f489136896150126b41381 Mon Sep 17 00:00:00 2001
From: Mason Williams
Date: Mon, 27 Oct 2025 00:07:54 -0400
Subject: [PATCH 3/4] docs(batching): update payload and concurrency docs

---
 apps/batching.mdx | 17 +++++++++--------
 1 file changed, 9 insertions(+), 8 deletions(-)

diff --git a/apps/batching.mdx b/apps/batching.mdx
index 7b777c0..8567bd0 100644
--- a/apps/batching.mdx
+++ b/apps/batching.mdx
@@ -20,9 +20,9 @@ const batchJob = await kernel.invocations.batch.create({
   action_name: 'analyze',
   version: '1.0.0',
   payloads: [
-    '{"url": "https://example.com/page1"}',
-    '{"url": "https://example.com/page2"}',
-    '{"url": "https://example.com/page3"}',
+    JSON.stringify({ url: "https://example.com/page1" }),
+    JSON.stringify({ url: "https://example.com/page2" }),
+    JSON.stringify({ url: "https://example.com/page3" }),
   ],
   max_concurrency: 5, // Optional: limit concurrent executions
 });
@@ -33,6 +33,7 @@ console.log(batchJob.total_count);
 
 ```python Python
 from kernel import Kernel
+import json
 
 kernel = Kernel()
 
@@ -41,9 +42,9 @@ batch_job = kernel.invocations.batch.create(
     action_name="analyze",
     version="1.0.0",
     payloads=[
-        '{"url": "https://example.com/page1"}',
-        '{"url": "https://example.com/page2"}',
-        '{"url": "https://example.com/page3"}',
+        json.dumps({"url": "https://example.com/page1"}),
+        json.dumps({"url": "https://example.com/page2"}),
+        json.dumps({"url": "https://example.com/page3"}),
     ],
     max_concurrency=5, # Optional: limit concurrent executions
 )
@@ -59,7 +60,7 @@ print(batch_job.total_count)
 - `action_name` (required): The action to invoke.
 - `version` (optional): The app version to use. Defaults to `"latest"`.
 - `payloads` (required): Array of JSON strings, each representing a payload for one invocation.
-- `max_concurrency` (optional): Maximum number of concurrent invocations. If not specified, all invocations run in parallel.
+- `max_concurrency` (optional): Maximum number of concurrent invocations. If not specified, defaults to your organization's `max_concurrent_invocations` limit (5 for Developer, 50 for Start-Up, 1000 for Enterprise).
 
 
 Each payload must be a valid JSON string and is subject to the same 64 KB size limit as regular invocations.
@@ -192,7 +193,7 @@ print(f"Cancelled: {batch_job.status}") -Cancelling a batch job only prevents new invocations from starting. Already running invocations will continue to completion. +Cancelling a batch job stops all running and pending invocations. Completed invocations are not affected. ## Batch job statuses From 5c0209368d9a501bf630d75e1bfddc39f0f7d6fe Mon Sep 17 00:00:00 2001 From: Mason Williams Date: Mon, 27 Oct 2025 01:06:45 -0400 Subject: [PATCH 4/4] docs: Remove batch job cancelling section --- apps/batching.mdx | 31 +------------------------------ 1 file changed, 1 insertion(+), 30 deletions(-) diff --git a/apps/batching.mdx b/apps/batching.mdx index 8567bd0..4fbb1d3 100644 --- a/apps/batching.mdx +++ b/apps/batching.mdx @@ -167,35 +167,6 @@ for event in stream: - `batch_state`: Sent when the batch job status changes (initial state and terminal states). - `batch_progress`: Sent when invocation counts change (succeeded or failed count updates). -## Cancelling a batch job - -Cancel a running batch job to stop all pending invocations: - - -```typescript Typescript/Javascript -const batchJob = await kernel.batchJobs.update('batch_job_id', { - status: 'failed', - output: 'Cancelled by user', -}); - -console.log(`Cancelled: ${batchJob.status}`); -``` - -```python Python -batch_job = kernel.batch_jobs.update( - "batch_job_id", - status="failed", - output="Cancelled by user", -) - -print(f"Cancelled: {batch_job.status}") -``` - - - -Cancelling a batch job stops all running and pending invocations. Completed invocations are not affected. - - ## Batch job statuses Batch jobs progress through the following statuses: @@ -203,7 +174,7 @@ Batch jobs progress through the following statuses: - `queued`: Batch job created, waiting to start. - `running`: Invocations are being executed. - `succeeded`: All invocations completed successfully. -- `failed`: Batch job was cancelled or encountered a critical error. +- `failed`: Batch job encountered a critical error. - `partially_failed`: Some invocations succeeded, others failed. ## Best practices